opkg 0.11.2 → 0.11.3
This diff shows the changes between package versions publicly released to one of the supported registries. It is provided for informational purposes only and reflects the package contents as they appear in the respective public registry.
- package/README.md +4 -2
- package/package.json +12 -1
- package/packages/cli/dist/{add-GJYCLX7R.js → add-S3TXZ7PY.js} +10 -10
- package/packages/cli/dist/{chunk-Q4L55OR7.js → chunk-34ABTOH6.js} +2 -2
- package/packages/cli/dist/{chunk-TMBWLJM6.js → chunk-3H2IZLIF.js} +2 -2
- package/packages/cli/dist/{chunk-JEO6LVPR.js → chunk-5FE6RV3U.js} +2 -2
- package/packages/cli/dist/{chunk-FKPRQFGS.js → chunk-74ECD7IG.js} +2 -2
- package/packages/cli/dist/{chunk-623RDPFN.js → chunk-7Y5YLMSY.js} +2 -2
- package/packages/cli/dist/{chunk-3TR4FXOO.js → chunk-CFG5HDE3.js} +3 -3
- package/packages/cli/dist/{chunk-6LGM6BXC.js → chunk-HBZNMQQR.js} +3 -3
- package/packages/cli/dist/{chunk-L2AI56Y6.js → chunk-HFCRIXLN.js} +2 -2
- package/packages/cli/dist/{chunk-L2AI56Y6.js.map → chunk-HFCRIXLN.js.map} +1 -1
- package/packages/cli/dist/{chunk-XNC5UBAA.js → chunk-HVQ7GY5N.js} +12 -6
- package/packages/cli/dist/chunk-HVQ7GY5N.js.map +7 -0
- package/packages/cli/dist/{chunk-L5PUJVGZ.js → chunk-JX4Y6QJH.js} +2 -2
- package/packages/cli/dist/{chunk-4XPF676E.js → chunk-KFTWXB3Y.js} +3 -3
- package/packages/cli/dist/{chunk-V3QNOLM2.js → chunk-MXXYQE3B.js} +2 -2
- package/packages/cli/dist/{chunk-GKCVGADO.js → chunk-NYRU3BHW.js} +2 -2
- package/packages/cli/dist/{chunk-VGBYUNXA.js → chunk-PHIA5RBP.js} +2 -2
- package/packages/cli/dist/{chunk-73II4CTT.js → chunk-RRHHWPZW.js} +2 -2
- package/packages/cli/dist/{chunk-HEFHYKEV.js → chunk-ZGS7FV4C.js} +4 -4
- package/packages/cli/dist/{default-S2KVP6GL.js → default-U2DJBXF5.js} +8 -8
- package/packages/cli/dist/{fork-package-Q5PTK4VH.js → fork-package-RVF3K3UT.js} +6 -6
- package/packages/cli/dist/index.js +14 -14
- package/packages/cli/dist/{install-V4QUIYEF.js → install-ZSAOUP2Z.js} +8 -8
- package/packages/cli/dist/{list-YLJXUNOA.js → list-RPO3EAV4.js} +8 -8
- package/packages/cli/dist/{login-3JZGY7BL.js → login-EPUMFFAE.js} +5 -5
- package/packages/cli/dist/{logout-NY7ZZCWN.js → logout-L255ARWR.js} +3 -3
- package/packages/cli/dist/{move-XEQ7AIFY.js → move-N5DTFIGV.js} +9 -9
- package/packages/cli/dist/{new-WBA3L5FE.js → new-C52XLJRT.js} +2 -2
- package/packages/cli/dist/{publish-4PYKKKS7.js → publish-VKYJBIA5.js} +6 -6
- package/packages/cli/dist/{remove-KO6WX53L.js → remove-TM77NHV3.js} +7 -7
- package/packages/cli/dist/{resolve-named-dependency-UCMYGP7G.js → resolve-named-dependency-SUDVLL5Y.js} +6 -6
- package/packages/cli/dist/{resource-discoverer-57TCCJDN.js → resource-discoverer-MGVEBFMJ.js} +5 -5
- package/packages/cli/dist/{sync-ZHKH6KH3.js → sync-WFKBS4L2.js} +8 -8
- package/packages/cli/dist/{uninstall-2C6KORSJ.js → uninstall-STONXT6H.js} +5 -5
- package/packages/cli/dist/{view-BDBG4IP6.js → view-Y52V6DUL.js} +9 -9
- package/packages/core/dist/core/flows/flow-executor.js +10 -0
- package/packages/core/dist/core/flows/flow-executor.js.map +1 -1
- package/packages/core/dist/generated/version.js +1 -1
- package/packages/core/dist/utils/git-url-detection.js +6 -3
- package/packages/core/dist/utils/git-url-detection.js.map +1 -1
- package/platforms.jsonc +24 -0
- package/packages/cli/bin/openpackage +0 -5
- package/packages/cli/dist/chunk-XNC5UBAA.js.map +0 -7
- package/packages/cli/package.json +0 -48
- package/packages/core/dist/constants/index.d.ts +0 -185
- package/packages/core/dist/constants/index.d.ts.map +0 -1
- package/packages/core/dist/constants/workspace.d.ts +0 -4
- package/packages/core/dist/constants/workspace.d.ts.map +0 -1
- package/packages/core/dist/core/add/add-conflict-handler.d.ts +0 -17
- package/packages/core/dist/core/add/add-conflict-handler.d.ts.map +0 -1
- package/packages/core/dist/core/add/add-dependency-flow.d.ts +0 -14
- package/packages/core/dist/core/add/add-dependency-flow.d.ts.map +0 -1
- package/packages/core/dist/core/add/add-input-classifier.d.ts +0 -34
- package/packages/core/dist/core/add/add-input-classifier.d.ts.map +0 -1
- package/packages/core/dist/core/add/add-orchestrator.d.ts +0 -41
- package/packages/core/dist/core/add/add-orchestrator.d.ts.map +0 -1
- package/packages/core/dist/core/add/add-to-source-pipeline.d.ts +0 -31
- package/packages/core/dist/core/add/add-to-source-pipeline.d.ts.map +0 -1
- package/packages/core/dist/core/add/entry-renamer.d.ts +0 -25
- package/packages/core/dist/core/add/entry-renamer.d.ts.map +0 -1
- package/packages/core/dist/core/add/move-cleanup.d.ts +0 -27
- package/packages/core/dist/core/add/move-cleanup.d.ts.map +0 -1
- package/packages/core/dist/core/add/package-index-updater.d.ts +0 -24
- package/packages/core/dist/core/add/package-index-updater.d.ts.map +0 -1
- package/packages/core/dist/core/add/platform-path-transformer.d.ts +0 -7
- package/packages/core/dist/core/add/platform-path-transformer.d.ts.map +0 -1
- package/packages/core/dist/core/add/source-collector.d.ts +0 -23
- package/packages/core/dist/core/add/source-collector.d.ts.map +0 -1
- package/packages/core/dist/core/api-keys.d.ts +0 -21
- package/packages/core/dist/core/api-keys.d.ts.map +0 -1
- package/packages/core/dist/core/auth.d.ts +0 -32
- package/packages/core/dist/core/auth.d.ts.map +0 -1
- package/packages/core/dist/core/cache-manager.d.ts +0 -22
- package/packages/core/dist/core/cache-manager.d.ts.map +0 -1
- package/packages/core/dist/core/config.d.ts +0 -64
- package/packages/core/dist/core/config.d.ts.map +0 -1
- package/packages/core/dist/core/conversion-context/creation.d.ts +0 -50
- package/packages/core/dist/core/conversion-context/creation.d.ts.map +0 -1
- package/packages/core/dist/core/conversion-context/index.d.ts +0 -12
- package/packages/core/dist/core/conversion-context/index.d.ts.map +0 -1
- package/packages/core/dist/core/conversion-context/serialization.d.ts +0 -38
- package/packages/core/dist/core/conversion-context/serialization.d.ts.map +0 -1
- package/packages/core/dist/core/conversion-context/validation.d.ts +0 -49
- package/packages/core/dist/core/conversion-context/validation.d.ts.map +0 -1
- package/packages/core/dist/core/dependency-resolver/index.d.ts +0 -11
- package/packages/core/dist/core/dependency-resolver/index.d.ts.map +0 -1
- package/packages/core/dist/core/dependency-resolver/types.d.ts +0 -52
- package/packages/core/dist/core/dependency-resolver/types.d.ts.map +0 -1
- package/packages/core/dist/core/device-auth.d.ts +0 -25
- package/packages/core/dist/core/device-auth.d.ts.map +0 -1
- package/packages/core/dist/core/directory.d.ts +0 -74
- package/packages/core/dist/core/directory.d.ts.map +0 -1
- package/packages/core/dist/core/discovery/file-discovery.d.ts +0 -17
- package/packages/core/dist/core/discovery/file-discovery.d.ts.map +0 -1
- package/packages/core/dist/core/discovery/platform-files-discovery.d.ts +0 -3
- package/packages/core/dist/core/discovery/platform-files-discovery.d.ts.map +0 -1
- package/packages/core/dist/core/execution-context.d.ts +0 -47
- package/packages/core/dist/core/execution-context.d.ts.map +0 -1
- package/packages/core/dist/core/flows/flow-condition-evaluator.d.ts +0 -29
- package/packages/core/dist/core/flows/flow-condition-evaluator.d.ts.map +0 -1
- package/packages/core/dist/core/flows/flow-execution-coordinator.d.ts +0 -88
- package/packages/core/dist/core/flows/flow-execution-coordinator.d.ts.map +0 -1
- package/packages/core/dist/core/flows/flow-executor.d.ts +0 -198
- package/packages/core/dist/core/flows/flow-executor.d.ts.map +0 -1
- package/packages/core/dist/core/flows/flow-key-extractor.d.ts +0 -40
- package/packages/core/dist/core/flows/flow-key-extractor.d.ts.map +0 -1
- package/packages/core/dist/core/flows/flow-key-mapper.d.ts +0 -27
- package/packages/core/dist/core/flows/flow-key-mapper.d.ts.map +0 -1
- package/packages/core/dist/core/flows/flow-source-discovery.d.ts +0 -107
- package/packages/core/dist/core/flows/flow-source-discovery.d.ts.map +0 -1
- package/packages/core/dist/core/flows/flow-transforms.d.ts +0 -183
- package/packages/core/dist/core/flows/flow-transforms.d.ts.map +0 -1
- package/packages/core/dist/core/flows/import-pipeline.d.ts +0 -66
- package/packages/core/dist/core/flows/import-pipeline.d.ts.map +0 -1
- package/packages/core/dist/core/flows/map-pipeline/context.d.ts +0 -30
- package/packages/core/dist/core/flows/map-pipeline/context.d.ts.map +0 -1
- package/packages/core/dist/core/flows/map-pipeline/index.d.ts +0 -48
- package/packages/core/dist/core/flows/map-pipeline/index.d.ts.map +0 -1
- package/packages/core/dist/core/flows/map-pipeline/operations/copy.d.ts +0 -32
- package/packages/core/dist/core/flows/map-pipeline/operations/copy.d.ts.map +0 -1
- package/packages/core/dist/core/flows/map-pipeline/operations/pipe.d.ts +0 -37
- package/packages/core/dist/core/flows/map-pipeline/operations/pipe.d.ts.map +0 -1
- package/packages/core/dist/core/flows/map-pipeline/operations/rename.d.ts +0 -26
- package/packages/core/dist/core/flows/map-pipeline/operations/rename.d.ts.map +0 -1
- package/packages/core/dist/core/flows/map-pipeline/operations/set.d.ts +0 -24
- package/packages/core/dist/core/flows/map-pipeline/operations/set.d.ts.map +0 -1
- package/packages/core/dist/core/flows/map-pipeline/operations/switch.d.ts +0 -31
- package/packages/core/dist/core/flows/map-pipeline/operations/switch.d.ts.map +0 -1
- package/packages/core/dist/core/flows/map-pipeline/operations/transform.d.ts +0 -22
- package/packages/core/dist/core/flows/map-pipeline/operations/transform.d.ts.map +0 -1
- package/packages/core/dist/core/flows/map-pipeline/operations/unset.d.ts +0 -27
- package/packages/core/dist/core/flows/map-pipeline/operations/unset.d.ts.map +0 -1
- package/packages/core/dist/core/flows/map-pipeline/types.d.ts +0 -358
- package/packages/core/dist/core/flows/map-pipeline/types.d.ts.map +0 -1
- package/packages/core/dist/core/flows/map-pipeline/utils.d.ts +0 -104
- package/packages/core/dist/core/flows/map-pipeline/utils.d.ts.map +0 -1
- package/packages/core/dist/core/flows/markdown.d.ts +0 -22
- package/packages/core/dist/core/flows/markdown.d.ts.map +0 -1
- package/packages/core/dist/core/flows/platform-converter.d.ts +0 -90
- package/packages/core/dist/core/flows/platform-converter.d.ts.map +0 -1
- package/packages/core/dist/core/flows/platform-suffix-handler.d.ts +0 -97
- package/packages/core/dist/core/flows/platform-suffix-handler.d.ts.map +0 -1
- package/packages/core/dist/core/flows/source-resolver.d.ts +0 -99
- package/packages/core/dist/core/flows/source-resolver.d.ts.map +0 -1
- package/packages/core/dist/core/flows/source-schema-validator.d.ts +0 -22
- package/packages/core/dist/core/flows/source-schema-validator.d.ts.map +0 -1
- package/packages/core/dist/core/flows/switch-resolver.d.ts +0 -63
- package/packages/core/dist/core/flows/switch-resolver.d.ts.map +0 -1
- package/packages/core/dist/core/flows/to-pattern-extractor.d.ts +0 -11
- package/packages/core/dist/core/flows/to-pattern-extractor.d.ts.map +0 -1
- package/packages/core/dist/core/fork-package.d.ts +0 -22
- package/packages/core/dist/core/fork-package.d.ts.map +0 -1
- package/packages/core/dist/core/git-clone-registry.d.ts +0 -19
- package/packages/core/dist/core/git-clone-registry.d.ts.map +0 -1
- package/packages/core/dist/core/git-clone.d.ts +0 -30
- package/packages/core/dist/core/git-clone.d.ts.map +0 -1
- package/packages/core/dist/core/glob-target-mapping.d.ts +0 -18
- package/packages/core/dist/core/glob-target-mapping.d.ts.map +0 -1
- package/packages/core/dist/core/http-client.d.ts +0 -54
- package/packages/core/dist/core/http-client.d.ts.map +0 -1
- package/packages/core/dist/core/install/ambiguity-prompts.d.ts +0 -46
- package/packages/core/dist/core/install/ambiguity-prompts.d.ts.map +0 -1
- package/packages/core/dist/core/install/base-detector.d.ts +0 -51
- package/packages/core/dist/core/install/base-detector.d.ts.map +0 -1
- package/packages/core/dist/core/install/conflicts/file-conflict-resolver.d.ts +0 -236
- package/packages/core/dist/core/install/conflicts/file-conflict-resolver.d.ts.map +0 -1
- package/packages/core/dist/core/install/conflicts/namespace-path.d.ts +0 -35
- package/packages/core/dist/core/install/conflicts/namespace-path.d.ts.map +0 -1
- package/packages/core/dist/core/install/convenience-matchers.d.ts +0 -83
- package/packages/core/dist/core/install/convenience-matchers.d.ts.map +0 -1
- package/packages/core/dist/core/install/conversion-context.d.ts +0 -146
- package/packages/core/dist/core/install/conversion-context.d.ts.map +0 -1
- package/packages/core/dist/core/install/conversion-coordinator.d.ts +0 -91
- package/packages/core/dist/core/install/conversion-coordinator.d.ts.map +0 -1
- package/packages/core/dist/core/install/detection-types.d.ts +0 -217
- package/packages/core/dist/core/install/detection-types.d.ts.map +0 -1
- package/packages/core/dist/core/install/download-keys.d.ts +0 -10
- package/packages/core/dist/core/install/download-keys.d.ts.map +0 -1
- package/packages/core/dist/core/install/file-format-detector.d.ts +0 -52
- package/packages/core/dist/core/install/file-format-detector.d.ts.map +0 -1
- package/packages/core/dist/core/install/file-updater.d.ts +0 -7
- package/packages/core/dist/core/install/file-updater.d.ts.map +0 -1
- package/packages/core/dist/core/install/flow-based-installer.d.ts +0 -30
- package/packages/core/dist/core/install/flow-based-installer.d.ts.map +0 -1
- package/packages/core/dist/core/install/flow-index-installer.d.ts +0 -55
- package/packages/core/dist/core/install/flow-index-installer.d.ts.map +0 -1
- package/packages/core/dist/core/install/format-detector.d.ts +0 -85
- package/packages/core/dist/core/install/format-detector.d.ts.map +0 -1
- package/packages/core/dist/core/install/format-distribution-analyzer.d.ts +0 -98
- package/packages/core/dist/core/install/format-distribution-analyzer.d.ts.map +0 -1
- package/packages/core/dist/core/install/format-group-merger.d.ts +0 -66
- package/packages/core/dist/core/install/format-group-merger.d.ts.map +0 -1
- package/packages/core/dist/core/install/git-package-loader.d.ts +0 -19
- package/packages/core/dist/core/install/git-package-loader.d.ts.map +0 -1
- package/packages/core/dist/core/install/handlers/index.d.ts +0 -5
- package/packages/core/dist/core/install/handlers/index.d.ts.map +0 -1
- package/packages/core/dist/core/install/helpers/conflict-detection.d.ts +0 -41
- package/packages/core/dist/core/install/helpers/conflict-detection.d.ts.map +0 -1
- package/packages/core/dist/core/install/helpers/file-discovery.d.ts +0 -8
- package/packages/core/dist/core/install/helpers/file-discovery.d.ts.map +0 -1
- package/packages/core/dist/core/install/helpers/format-detection.d.ts +0 -42
- package/packages/core/dist/core/install/helpers/format-detection.d.ts.map +0 -1
- package/packages/core/dist/core/install/helpers/index.d.ts +0 -10
- package/packages/core/dist/core/install/helpers/index.d.ts.map +0 -1
- package/packages/core/dist/core/install/helpers/result-aggregation.d.ts +0 -58
- package/packages/core/dist/core/install/helpers/result-aggregation.d.ts.map +0 -1
- package/packages/core/dist/core/install/helpers/result-logging.d.ts +0 -47
- package/packages/core/dist/core/install/helpers/result-logging.d.ts.map +0 -1
- package/packages/core/dist/core/install/import-flow-converter.d.ts +0 -89
- package/packages/core/dist/core/install/import-flow-converter.d.ts.map +0 -1
- package/packages/core/dist/core/install/index-based-installer.d.ts +0 -37
- package/packages/core/dist/core/install/index-based-installer.d.ts.map +0 -1
- package/packages/core/dist/core/install/input-classifier-base.d.ts +0 -72
- package/packages/core/dist/core/install/input-classifier-base.d.ts.map +0 -1
- package/packages/core/dist/core/install/install-errors.d.ts +0 -6
- package/packages/core/dist/core/install/install-errors.d.ts.map +0 -1
- package/packages/core/dist/core/install/install-helpers.d.ts +0 -36
- package/packages/core/dist/core/install/install-helpers.d.ts.map +0 -1
- package/packages/core/dist/core/install/install-reporting.d.ts +0 -48
- package/packages/core/dist/core/install/install-reporting.d.ts.map +0 -1
- package/packages/core/dist/core/install/list-handler.d.ts +0 -18
- package/packages/core/dist/core/install/list-handler.d.ts.map +0 -1
- package/packages/core/dist/core/install/local-source-resolution.d.ts +0 -29
- package/packages/core/dist/core/install/local-source-resolution.d.ts.map +0 -1
- package/packages/core/dist/core/install/marketplace-handler.d.ts +0 -129
- package/packages/core/dist/core/install/marketplace-handler.d.ts.map +0 -1
- package/packages/core/dist/core/install/operations/conflict-handler.d.ts +0 -14
- package/packages/core/dist/core/install/operations/conflict-handler.d.ts.map +0 -1
- package/packages/core/dist/core/install/operations/index.d.ts +0 -8
- package/packages/core/dist/core/install/operations/index.d.ts.map +0 -1
- package/packages/core/dist/core/install/operations/installation-executor.d.ts +0 -57
- package/packages/core/dist/core/install/operations/installation-executor.d.ts.map +0 -1
- package/packages/core/dist/core/install/operations/root-files.d.ts +0 -37
- package/packages/core/dist/core/install/operations/root-files.d.ts.map +0 -1
- package/packages/core/dist/core/install/orchestrator/index.d.ts +0 -5
- package/packages/core/dist/core/install/orchestrator/index.d.ts.map +0 -1
- package/packages/core/dist/core/install/orchestrator/orchestrator.d.ts +0 -90
- package/packages/core/dist/core/install/orchestrator/orchestrator.d.ts.map +0 -1
- package/packages/core/dist/core/install/orchestrator/strategies/base.d.ts +0 -35
- package/packages/core/dist/core/install/orchestrator/strategies/base.d.ts.map +0 -1
- package/packages/core/dist/core/install/orchestrator/strategies/bulk-strategy.d.ts +0 -18
- package/packages/core/dist/core/install/orchestrator/strategies/bulk-strategy.d.ts.map +0 -1
- package/packages/core/dist/core/install/orchestrator/strategies/embedded-strategy.d.ts +0 -22
- package/packages/core/dist/core/install/orchestrator/strategies/embedded-strategy.d.ts.map +0 -1
- package/packages/core/dist/core/install/orchestrator/strategies/git-strategy.d.ts +0 -17
- package/packages/core/dist/core/install/orchestrator/strategies/git-strategy.d.ts.map +0 -1
- package/packages/core/dist/core/install/orchestrator/strategies/index.d.ts +0 -12
- package/packages/core/dist/core/install/orchestrator/strategies/index.d.ts.map +0 -1
- package/packages/core/dist/core/install/orchestrator/strategies/path-strategy.d.ts +0 -20
- package/packages/core/dist/core/install/orchestrator/strategies/path-strategy.d.ts.map +0 -1
- package/packages/core/dist/core/install/orchestrator/strategies/registry-strategy.d.ts +0 -25
- package/packages/core/dist/core/install/orchestrator/strategies/registry-strategy.d.ts.map +0 -1
- package/packages/core/dist/core/install/orchestrator/subsumption-resolver.d.ts +0 -75
- package/packages/core/dist/core/install/orchestrator/subsumption-resolver.d.ts.map +0 -1
- package/packages/core/dist/core/install/orchestrator/types.d.ts +0 -118
- package/packages/core/dist/core/install/orchestrator/types.d.ts.map +0 -1
- package/packages/core/dist/core/install/package-input.d.ts +0 -28
- package/packages/core/dist/core/install/package-input.d.ts.map +0 -1
- package/packages/core/dist/core/install/package-installation.d.ts +0 -13
- package/packages/core/dist/core/install/package-installation.d.ts.map +0 -1
- package/packages/core/dist/core/install/package-marker-detector.d.ts +0 -93
- package/packages/core/dist/core/install/package-marker-detector.d.ts.map +0 -1
- package/packages/core/dist/core/install/path-package-loader.d.ts +0 -42
- package/packages/core/dist/core/install/path-package-loader.d.ts.map +0 -1
- package/packages/core/dist/core/install/platform-resolution.d.ts +0 -16
- package/packages/core/dist/core/install/platform-resolution.d.ts.map +0 -1
- package/packages/core/dist/core/install/plugin-detector.d.ts +0 -54
- package/packages/core/dist/core/install/plugin-detector.d.ts.map +0 -1
- package/packages/core/dist/core/install/plugin-metadata-resolver.d.ts +0 -62
- package/packages/core/dist/core/install/plugin-metadata-resolver.d.ts.map +0 -1
- package/packages/core/dist/core/install/plugin-sources.d.ts +0 -59
- package/packages/core/dist/core/install/plugin-sources.d.ts.map +0 -1
- package/packages/core/dist/core/install/plugin-transformer.d.ts +0 -29
- package/packages/core/dist/core/install/plugin-transformer.d.ts.map +0 -1
- package/packages/core/dist/core/install/preprocessing/base-resolver.d.ts +0 -61
- package/packages/core/dist/core/install/preprocessing/base-resolver.d.ts.map +0 -1
- package/packages/core/dist/core/install/preprocessing/context-population.d.ts +0 -18
- package/packages/core/dist/core/install/preprocessing/context-population.d.ts.map +0 -1
- package/packages/core/dist/core/install/preprocessing/convenience-preprocessor.d.ts +0 -34
- package/packages/core/dist/core/install/preprocessing/convenience-preprocessor.d.ts.map +0 -1
- package/packages/core/dist/core/install/preprocessing/index.d.ts +0 -6
- package/packages/core/dist/core/install/preprocessing/index.d.ts.map +0 -1
- package/packages/core/dist/core/install/preprocessing/input-classifier.d.ts +0 -21
- package/packages/core/dist/core/install/preprocessing/input-classifier.d.ts.map +0 -1
- package/packages/core/dist/core/install/preprocessing/options-normalizer.d.ts +0 -20
- package/packages/core/dist/core/install/preprocessing/options-normalizer.d.ts.map +0 -1
- package/packages/core/dist/core/install/remote-reporting.d.ts +0 -11
- package/packages/core/dist/core/install/remote-reporting.d.ts.map +0 -1
- package/packages/core/dist/core/install/resource-discoverer.d.ts +0 -21
- package/packages/core/dist/core/install/resource-discoverer.d.ts.map +0 -1
- package/packages/core/dist/core/install/resource-pattern-normalization.d.ts +0 -2
- package/packages/core/dist/core/install/resource-pattern-normalization.d.ts.map +0 -1
- package/packages/core/dist/core/install/resource-search.d.ts +0 -3
- package/packages/core/dist/core/install/resource-search.d.ts.map +0 -1
- package/packages/core/dist/core/install/resource-selection-menu.d.ts +0 -25
- package/packages/core/dist/core/install/resource-selection-menu.d.ts.map +0 -1
- package/packages/core/dist/core/install/resource-types.d.ts +0 -65
- package/packages/core/dist/core/install/resource-types.d.ts.map +0 -1
- package/packages/core/dist/core/install/schema-registry.d.ts +0 -71
- package/packages/core/dist/core/install/schema-registry.d.ts.map +0 -1
- package/packages/core/dist/core/install/sources/base.d.ts +0 -79
- package/packages/core/dist/core/install/sources/base.d.ts.map +0 -1
- package/packages/core/dist/core/install/sources/git-source.d.ts +0 -12
- package/packages/core/dist/core/install/sources/git-source.d.ts.map +0 -1
- package/packages/core/dist/core/install/sources/loader-factory.d.ts +0 -7
- package/packages/core/dist/core/install/sources/loader-factory.d.ts.map +0 -1
- package/packages/core/dist/core/install/sources/path-source.d.ts +0 -11
- package/packages/core/dist/core/install/sources/path-source.d.ts.map +0 -1
- package/packages/core/dist/core/install/sources/registry-source.d.ts +0 -12
- package/packages/core/dist/core/install/sources/registry-source.d.ts.map +0 -1
- package/packages/core/dist/core/install/sources/workspace-source.d.ts +0 -11
- package/packages/core/dist/core/install/sources/workspace-source.d.ts.map +0 -1
- package/packages/core/dist/core/install/stale-file-cleanup.d.ts +0 -30
- package/packages/core/dist/core/install/stale-file-cleanup.d.ts.map +0 -1
- package/packages/core/dist/core/install/strategies/base-strategy.d.ts +0 -45
- package/packages/core/dist/core/install/strategies/base-strategy.d.ts.map +0 -1
- package/packages/core/dist/core/install/strategies/conversion-strategy.d.ts +0 -43
- package/packages/core/dist/core/install/strategies/conversion-strategy.d.ts.map +0 -1
- package/packages/core/dist/core/install/strategies/flow-based-strategy.d.ts +0 -46
- package/packages/core/dist/core/install/strategies/flow-based-strategy.d.ts.map +0 -1
- package/packages/core/dist/core/install/strategies/helpers/flow-helpers.d.ts +0 -12
- package/packages/core/dist/core/install/strategies/helpers/flow-helpers.d.ts.map +0 -1
- package/packages/core/dist/core/install/strategies/helpers/platform-filtering.d.ts +0 -21
- package/packages/core/dist/core/install/strategies/helpers/platform-filtering.d.ts.map +0 -1
- package/packages/core/dist/core/install/strategies/helpers/result-converter.d.ts +0 -22
- package/packages/core/dist/core/install/strategies/helpers/result-converter.d.ts.map +0 -1
- package/packages/core/dist/core/install/strategies/helpers/temp-directory.d.ts +0 -85
- package/packages/core/dist/core/install/strategies/helpers/temp-directory.d.ts.map +0 -1
- package/packages/core/dist/core/install/strategies/index.d.ts +0 -16
- package/packages/core/dist/core/install/strategies/index.d.ts.map +0 -1
- package/packages/core/dist/core/install/strategies/strategy-selector.d.ts +0 -20
- package/packages/core/dist/core/install/strategies/strategy-selector.d.ts.map +0 -1
- package/packages/core/dist/core/install/strategies/types.d.ts +0 -110
- package/packages/core/dist/core/install/strategies/types.d.ts.map +0 -1
- package/packages/core/dist/core/install/types.d.ts +0 -12
- package/packages/core/dist/core/install/types.d.ts.map +0 -1
- package/packages/core/dist/core/install/unified/context-builders.d.ts +0 -61
- package/packages/core/dist/core/install/unified/context-builders.d.ts.map +0 -1
- package/packages/core/dist/core/install/unified/context-helpers.d.ts +0 -18
- package/packages/core/dist/core/install/unified/context-helpers.d.ts.map +0 -1
- package/packages/core/dist/core/install/unified/context.d.ts +0 -164
- package/packages/core/dist/core/install/unified/context.d.ts.map +0 -1
- package/packages/core/dist/core/install/unified/index.d.ts +0 -12
- package/packages/core/dist/core/install/unified/index.d.ts.map +0 -1
- package/packages/core/dist/core/install/unified/multi-context-pipeline.d.ts +0 -30
- package/packages/core/dist/core/install/unified/multi-context-pipeline.d.ts.map +0 -1
- package/packages/core/dist/core/install/unified/phases/conflicts.d.ts +0 -7
- package/packages/core/dist/core/install/unified/phases/conflicts.d.ts.map +0 -1
- package/packages/core/dist/core/install/unified/phases/convert.d.ts +0 -22
- package/packages/core/dist/core/install/unified/phases/convert.d.ts.map +0 -1
- package/packages/core/dist/core/install/unified/phases/execute.d.ts +0 -30
- package/packages/core/dist/core/install/unified/phases/execute.d.ts.map +0 -1
- package/packages/core/dist/core/install/unified/phases/load-package.d.ts +0 -11
- package/packages/core/dist/core/install/unified/phases/load-package.d.ts.map +0 -1
- package/packages/core/dist/core/install/unified/phases/manifest.d.ts +0 -7
- package/packages/core/dist/core/install/unified/phases/manifest.d.ts.map +0 -1
- package/packages/core/dist/core/install/unified/phases/report.d.ts +0 -11
- package/packages/core/dist/core/install/unified/phases/report.d.ts.map +0 -1
- package/packages/core/dist/core/install/unified/pipeline.d.ts +0 -27
- package/packages/core/dist/core/install/unified/pipeline.d.ts.map +0 -1
- package/packages/core/dist/core/install/validators/index.d.ts +0 -3
- package/packages/core/dist/core/install/validators/index.d.ts.map +0 -1
- package/packages/core/dist/core/install/validators/options-validator.d.ts +0 -16
- package/packages/core/dist/core/install/validators/options-validator.d.ts.map +0 -1
- package/packages/core/dist/core/install/validators/target-validator.d.ts +0 -6
- package/packages/core/dist/core/install/validators/target-validator.d.ts.map +0 -1
- package/packages/core/dist/core/install/version-selection.d.ts +0 -51
- package/packages/core/dist/core/install/version-selection.d.ts.map +0 -1
- package/packages/core/dist/core/install/wave-resolver/content-root-cache.d.ts +0 -26
- package/packages/core/dist/core/install/wave-resolver/content-root-cache.d.ts.map +0 -1
- package/packages/core/dist/core/install/wave-resolver/context-builder.d.ts +0 -39
- package/packages/core/dist/core/install/wave-resolver/context-builder.d.ts.map +0 -1
- package/packages/core/dist/core/install/wave-resolver/fetcher.d.ts +0 -49
- package/packages/core/dist/core/install/wave-resolver/fetcher.d.ts.map +0 -1
- package/packages/core/dist/core/install/wave-resolver/index-updater.d.ts +0 -23
- package/packages/core/dist/core/install/wave-resolver/index-updater.d.ts.map +0 -1
- package/packages/core/dist/core/install/wave-resolver/index-write-collector.d.ts +0 -110
- package/packages/core/dist/core/install/wave-resolver/index-write-collector.d.ts.map +0 -1
- package/packages/core/dist/core/install/wave-resolver/index.d.ts +0 -19
- package/packages/core/dist/core/install/wave-resolver/index.d.ts.map +0 -1
- package/packages/core/dist/core/install/wave-resolver/lockfile-resolver.d.ts +0 -28
- package/packages/core/dist/core/install/wave-resolver/lockfile-resolver.d.ts.map +0 -1
- package/packages/core/dist/core/install/wave-resolver/manifest-reader.d.ts +0 -34
- package/packages/core/dist/core/install/wave-resolver/manifest-reader.d.ts.map +0 -1
- package/packages/core/dist/core/install/wave-resolver/platform-filter.d.ts +0 -45
- package/packages/core/dist/core/install/wave-resolver/platform-filter.d.ts.map +0 -1
- package/packages/core/dist/core/install/wave-resolver/types.d.ts +0 -220
- package/packages/core/dist/core/install/wave-resolver/types.d.ts.map +0 -1
- package/packages/core/dist/core/install/wave-resolver/version-solver.d.ts +0 -65
- package/packages/core/dist/core/install/wave-resolver/version-solver.d.ts.map +0 -1
- package/packages/core/dist/core/install/wave-resolver/wave-engine.d.ts +0 -23
- package/packages/core/dist/core/install/wave-resolver/wave-engine.d.ts.map +0 -1
- package/packages/core/dist/core/install/wave-resolver/wave-installer.d.ts +0 -56
- package/packages/core/dist/core/install/wave-resolver/wave-installer.d.ts.map +0 -1
- package/packages/core/dist/core/interaction-policy.d.ts +0 -40
- package/packages/core/dist/core/interaction-policy.d.ts.map +0 -1
- package/packages/core/dist/core/list/content-status-checker.d.ts +0 -42
- package/packages/core/dist/core/list/content-status-checker.d.ts.map +0 -1
- package/packages/core/dist/core/list/list-pipeline.d.ts +0 -95
- package/packages/core/dist/core/list/list-pipeline.d.ts.map +0 -1
- package/packages/core/dist/core/list/list-printers.d.ts +0 -21
- package/packages/core/dist/core/list/list-printers.d.ts.map +0 -1
- package/packages/core/dist/core/list/list-tree-renderer.d.ts +0 -107
- package/packages/core/dist/core/list/list-tree-renderer.d.ts.map +0 -1
- package/packages/core/dist/core/list/remote-list-resolver.d.ts +0 -22
- package/packages/core/dist/core/list/remote-list-resolver.d.ts.map +0 -1
- package/packages/core/dist/core/list/scope-data-collector.d.ts +0 -72
- package/packages/core/dist/core/list/scope-data-collector.d.ts.map +0 -1
- package/packages/core/dist/core/list/untracked-files-scanner.d.ts +0 -62
- package/packages/core/dist/core/list/untracked-files-scanner.d.ts.map +0 -1
- package/packages/core/dist/core/list/view-metadata.d.ts +0 -12
- package/packages/core/dist/core/list/view-metadata.d.ts.map +0 -1
- package/packages/core/dist/core/markdown-frontmatter.d.ts +0 -54
- package/packages/core/dist/core/markdown-frontmatter.d.ts.map +0 -1
- package/packages/core/dist/core/move/move-pipeline.d.ts +0 -38
- package/packages/core/dist/core/move/move-pipeline.d.ts.map +0 -1
- package/packages/core/dist/core/move/move-rename-executor.d.ts +0 -22
- package/packages/core/dist/core/move/move-rename-executor.d.ts.map +0 -1
- package/packages/core/dist/core/move/move-validator.d.ts +0 -14
- package/packages/core/dist/core/move/move-validator.d.ts.map +0 -1
- package/packages/core/dist/core/move/move-workspace-rename-executor.d.ts +0 -27
- package/packages/core/dist/core/move/move-workspace-rename-executor.d.ts.map +0 -1
- package/packages/core/dist/core/openpackage.d.ts +0 -46
- package/packages/core/dist/core/openpackage.d.ts.map +0 -1
- package/packages/core/dist/core/package-context.d.ts +0 -99
- package/packages/core/dist/core/package-context.d.ts.map +0 -1
- package/packages/core/dist/core/package-creation.d.ts +0 -45
- package/packages/core/dist/core/package-creation.d.ts.map +0 -1
- package/packages/core/dist/core/package-management.d.ts +0 -69
- package/packages/core/dist/core/package-management.d.ts.map +0 -1
- package/packages/core/dist/core/package-name-resolution.d.ts +0 -110
- package/packages/core/dist/core/package-name-resolution.d.ts.map +0 -1
- package/packages/core/dist/core/package-versioning.d.ts +0 -37
- package/packages/core/dist/core/package-versioning.d.ts.map +0 -1
- package/packages/core/dist/core/package.d.ts +0 -50
- package/packages/core/dist/core/package.d.ts.map +0 -1
- package/packages/core/dist/core/platform/directory-preservation.d.ts +0 -52
- package/packages/core/dist/core/platform/directory-preservation.d.ts.map +0 -1
- package/packages/core/dist/core/platform/platform-disambiguation.d.ts +0 -26
- package/packages/core/dist/core/platform/platform-disambiguation.d.ts.map +0 -1
- package/packages/core/dist/core/platform/platform-file.d.ts +0 -42
- package/packages/core/dist/core/platform/platform-file.d.ts.map +0 -1
- package/packages/core/dist/core/platform/platform-mapper.d.ts +0 -76
- package/packages/core/dist/core/platform/platform-mapper.d.ts.map +0 -1
- package/packages/core/dist/core/platform/platform-path-utils.d.ts +0 -28
- package/packages/core/dist/core/platform/platform-path-utils.d.ts.map +0 -1
- package/packages/core/dist/core/platform/platform-root-files.d.ts +0 -13
- package/packages/core/dist/core/platform/platform-root-files.d.ts.map +0 -1
- package/packages/core/dist/core/platform/platform-specific-paths.d.ts +0 -21
- package/packages/core/dist/core/platform/platform-specific-paths.d.ts.map +0 -1
- package/packages/core/dist/core/platform/platform-utils.d.ts +0 -23
- package/packages/core/dist/core/platform/platform-utils.d.ts.map +0 -1
- package/packages/core/dist/core/platform/registry-entry-filter.d.ts +0 -29
- package/packages/core/dist/core/platform/registry-entry-filter.d.ts.map +0 -1
- package/packages/core/dist/core/platform/root-file-uninstaller.d.ts +0 -15
- package/packages/core/dist/core/platform/root-file-uninstaller.d.ts.map +0 -1
- package/packages/core/dist/core/platform-yaml-merge.d.ts +0 -9
- package/packages/core/dist/core/platform-yaml-merge.d.ts.map +0 -1
- package/packages/core/dist/core/platforms.d.ts +0 -197
- package/packages/core/dist/core/platforms.d.ts.map +0 -1
- package/packages/core/dist/core/ports/buffered-output.d.ts +0 -36
- package/packages/core/dist/core/ports/buffered-output.d.ts.map +0 -1
- package/packages/core/dist/core/ports/console-output.d.ts +0 -15
- package/packages/core/dist/core/ports/console-output.d.ts.map +0 -1
- package/packages/core/dist/core/ports/console-progress.d.ts +0 -21
- package/packages/core/dist/core/ports/console-progress.d.ts.map +0 -1
- package/packages/core/dist/core/ports/console-prompt.d.ts +0 -13
- package/packages/core/dist/core/ports/console-prompt.d.ts.map +0 -1
- package/packages/core/dist/core/ports/index.d.ts +0 -15
- package/packages/core/dist/core/ports/index.d.ts.map +0 -1
- package/packages/core/dist/core/ports/output.d.ts +0 -50
- package/packages/core/dist/core/ports/output.d.ts.map +0 -1
- package/packages/core/dist/core/ports/progress.d.ts +0 -131
- package/packages/core/dist/core/ports/progress.d.ts.map +0 -1
- package/packages/core/dist/core/ports/prompt.d.ts +0 -64
- package/packages/core/dist/core/ports/prompt.d.ts.map +0 -1
- package/packages/core/dist/core/ports/resolve.d.ts +0 -37
- package/packages/core/dist/core/ports/resolve.d.ts.map +0 -1
- package/packages/core/dist/core/profiles.d.ts +0 -54
- package/packages/core/dist/core/profiles.d.ts.map +0 -1
- package/packages/core/dist/core/publish/local-publish-pipeline.d.ts +0 -17
- package/packages/core/dist/core/publish/local-publish-pipeline.d.ts.map +0 -1
- package/packages/core/dist/core/publish/publish-errors.d.ts +0 -8
- package/packages/core/dist/core/publish/publish-errors.d.ts.map +0 -1
- package/packages/core/dist/core/publish/publish-output.d.ts +0 -29
- package/packages/core/dist/core/publish/publish-output.d.ts.map +0 -1
- package/packages/core/dist/core/publish/publish-pipeline.d.ts +0 -7
- package/packages/core/dist/core/publish/publish-pipeline.d.ts.map +0 -1
- package/packages/core/dist/core/publish/publish-types.d.ts +0 -18
- package/packages/core/dist/core/publish/publish-types.d.ts.map +0 -1
- package/packages/core/dist/core/publish/publish-upload.d.ts +0 -9
- package/packages/core/dist/core/publish/publish-upload.d.ts.map +0 -1
- package/packages/core/dist/core/registry-writer.d.ts +0 -22
- package/packages/core/dist/core/registry-writer.d.ts.map +0 -1
- package/packages/core/dist/core/remote-pull.d.ts +0 -96
- package/packages/core/dist/core/remote-pull.d.ts.map +0 -1
- package/packages/core/dist/core/remove/removal-collector.d.ts +0 -13
- package/packages/core/dist/core/remove/removal-collector.d.ts.map +0 -1
- package/packages/core/dist/core/remove/removal-confirmation.d.ts +0 -29
- package/packages/core/dist/core/remove/removal-confirmation.d.ts.map +0 -1
- package/packages/core/dist/core/remove/remove-dependency-flow.d.ts +0 -18
- package/packages/core/dist/core/remove/remove-dependency-flow.d.ts.map +0 -1
- package/packages/core/dist/core/remove/remove-from-source-pipeline.d.ts +0 -32
- package/packages/core/dist/core/remove/remove-from-source-pipeline.d.ts.map +0 -1
- package/packages/core/dist/core/remove/remove-input-classifier.d.ts +0 -22
- package/packages/core/dist/core/remove/remove-input-classifier.d.ts.map +0 -1
- package/packages/core/dist/core/resources/disambiguation-prompt.d.ts +0 -38
- package/packages/core/dist/core/resources/disambiguation-prompt.d.ts.map +0 -1
- package/packages/core/dist/core/resources/installed-resources.d.ts +0 -4
- package/packages/core/dist/core/resources/installed-resources.d.ts.map +0 -1
- package/packages/core/dist/core/resources/markdown-metadata.d.ts +0 -7
- package/packages/core/dist/core/resources/markdown-metadata.d.ts.map +0 -1
- package/packages/core/dist/core/resources/resource-builder.d.ts +0 -53
- package/packages/core/dist/core/resources/resource-builder.d.ts.map +0 -1
- package/packages/core/dist/core/resources/resource-catalog.d.ts +0 -26
- package/packages/core/dist/core/resources/resource-catalog.d.ts.map +0 -1
- package/packages/core/dist/core/resources/resource-classifier.d.ts +0 -59
- package/packages/core/dist/core/resources/resource-classifier.d.ts.map +0 -1
- package/packages/core/dist/core/resources/resource-namespace.d.ts +0 -40
- package/packages/core/dist/core/resources/resource-namespace.d.ts.map +0 -1
- package/packages/core/dist/core/resources/resource-naming.d.ts +0 -5
- package/packages/core/dist/core/resources/resource-naming.d.ts.map +0 -1
- package/packages/core/dist/core/resources/resource-provenance.d.ts +0 -29
- package/packages/core/dist/core/resources/resource-provenance.d.ts.map +0 -1
- package/packages/core/dist/core/resources/resource-query.d.ts +0 -22
- package/packages/core/dist/core/resources/resource-query.d.ts.map +0 -1
- package/packages/core/dist/core/resources/resource-registry.d.ts +0 -30
- package/packages/core/dist/core/resources/resource-registry.d.ts.map +0 -1
- package/packages/core/dist/core/resources/resource-resolver.d.ts +0 -67
- package/packages/core/dist/core/resources/resource-resolver.d.ts.map +0 -1
- package/packages/core/dist/core/resources/resource-spec.d.ts +0 -58
- package/packages/core/dist/core/resources/resource-spec.d.ts.map +0 -1
- package/packages/core/dist/core/resources/scope-traversal.d.ts +0 -39
- package/packages/core/dist/core/resources/scope-traversal.d.ts.map +0 -1
- package/packages/core/dist/core/resources/workspace-resource-discovery.d.ts +0 -25
- package/packages/core/dist/core/resources/workspace-resource-discovery.d.ts.map +0 -1
- package/packages/core/dist/core/save/save-candidate-builder.d.ts +0 -55
- package/packages/core/dist/core/save/save-candidate-builder.d.ts.map +0 -1
- package/packages/core/dist/core/save/save-conflict-analyzer.d.ts +0 -116
- package/packages/core/dist/core/save/save-conflict-analyzer.d.ts.map +0 -1
- package/packages/core/dist/core/save/save-conversion-helper.d.ts +0 -84
- package/packages/core/dist/core/save/save-conversion-helper.d.ts.map +0 -1
- package/packages/core/dist/core/save/save-group-builder.d.ts +0 -49
- package/packages/core/dist/core/save/save-group-builder.d.ts.map +0 -1
- package/packages/core/dist/core/save/save-interactive-resolver.d.ts +0 -49
- package/packages/core/dist/core/save/save-interactive-resolver.d.ts.map +0 -1
- package/packages/core/dist/core/save/save-merge-extractor.d.ts +0 -72
- package/packages/core/dist/core/save/save-merge-extractor.d.ts.map +0 -1
- package/packages/core/dist/core/save/save-new-file-detector.d.ts +0 -46
- package/packages/core/dist/core/save/save-new-file-detector.d.ts.map +0 -1
- package/packages/core/dist/core/save/save-options-normalizer.d.ts +0 -26
- package/packages/core/dist/core/save/save-options-normalizer.d.ts.map +0 -1
- package/packages/core/dist/core/save/save-platform-handler.d.ts +0 -51
- package/packages/core/dist/core/save/save-platform-handler.d.ts.map +0 -1
- package/packages/core/dist/core/save/save-resolution-executor.d.ts +0 -39
- package/packages/core/dist/core/save/save-resolution-executor.d.ts.map +0 -1
- package/packages/core/dist/core/save/save-result-reporter.d.ts +0 -100
- package/packages/core/dist/core/save/save-result-reporter.d.ts.map +0 -1
- package/packages/core/dist/core/save/save-to-source-pipeline.d.ts +0 -43
- package/packages/core/dist/core/save/save-to-source-pipeline.d.ts.map +0 -1
- package/packages/core/dist/core/save/save-types.d.ts +0 -175
- package/packages/core/dist/core/save/save-types.d.ts.map +0 -1
- package/packages/core/dist/core/save/save-write-coordinator.d.ts +0 -36
- package/packages/core/dist/core/save/save-write-coordinator.d.ts.map +0 -1
- package/packages/core/dist/core/schema/schema-printers.d.ts +0 -12
- package/packages/core/dist/core/schema/schema-printers.d.ts.map +0 -1
- package/packages/core/dist/core/schema/schema-query.d.ts +0 -48
- package/packages/core/dist/core/schema/schema-query.d.ts.map +0 -1
- package/packages/core/dist/core/scope-resolution.d.ts +0 -57
- package/packages/core/dist/core/scope-resolution.d.ts.map +0 -1
- package/packages/core/dist/core/scoping/package-scoping.d.ts +0 -35
- package/packages/core/dist/core/scoping/package-scoping.d.ts.map +0 -1
- package/packages/core/dist/core/search/search-pipeline.d.ts +0 -55
- package/packages/core/dist/core/search/search-pipeline.d.ts.map +0 -1
- package/packages/core/dist/core/set/set-output.d.ts +0 -23
- package/packages/core/dist/core/set/set-output.d.ts.map +0 -1
- package/packages/core/dist/core/set/set-pipeline.d.ts +0 -13
- package/packages/core/dist/core/set/set-pipeline.d.ts.map +0 -1
- package/packages/core/dist/core/set/set-types.d.ts +0 -52
- package/packages/core/dist/core/set/set-types.d.ts.map +0 -1
- package/packages/core/dist/core/source-mutability.d.ts +0 -31
- package/packages/core/dist/core/source-mutability.d.ts.map +0 -1
- package/packages/core/dist/core/source-resolution/dependency-graph.d.ts +0 -8
- package/packages/core/dist/core/source-resolution/dependency-graph.d.ts.map +0 -1
- package/packages/core/dist/core/source-resolution/resolve-mutable-source.d.ts +0 -31
- package/packages/core/dist/core/source-resolution/resolve-mutable-source.d.ts.map +0 -1
- package/packages/core/dist/core/source-resolution/resolve-named-dependency.d.ts +0 -22
- package/packages/core/dist/core/source-resolution/resolve-named-dependency.d.ts.map +0 -1
- package/packages/core/dist/core/source-resolution/resolve-package-source.d.ts +0 -3
- package/packages/core/dist/core/source-resolution/resolve-package-source.d.ts.map +0 -1
- package/packages/core/dist/core/source-resolution/resolve-registry-version.d.ts +0 -31
- package/packages/core/dist/core/source-resolution/resolve-registry-version.d.ts.map +0 -1
- package/packages/core/dist/core/source-resolution/types.d.ts +0 -39
- package/packages/core/dist/core/source-resolution/types.d.ts.map +0 -1
- package/packages/core/dist/core/sync/sync-conflict-resolver.d.ts +0 -20
- package/packages/core/dist/core/sync/sync-conflict-resolver.d.ts.map +0 -1
- package/packages/core/dist/core/sync/sync-direct-flow.d.ts +0 -28
- package/packages/core/dist/core/sync/sync-direct-flow.d.ts.map +0 -1
- package/packages/core/dist/core/sync/sync-discovery.d.ts +0 -21
- package/packages/core/dist/core/sync/sync-discovery.d.ts.map +0 -1
- package/packages/core/dist/core/sync/sync-options-normalizer.d.ts +0 -30
- package/packages/core/dist/core/sync/sync-options-normalizer.d.ts.map +0 -1
- package/packages/core/dist/core/sync/sync-pipeline.d.ts +0 -22
- package/packages/core/dist/core/sync/sync-pipeline.d.ts.map +0 -1
- package/packages/core/dist/core/sync/sync-pull-executor.d.ts +0 -18
- package/packages/core/dist/core/sync/sync-pull-executor.d.ts.map +0 -1
- package/packages/core/dist/core/sync/sync-pull-new-executor.d.ts +0 -14
- package/packages/core/dist/core/sync/sync-pull-new-executor.d.ts.map +0 -1
- package/packages/core/dist/core/sync/sync-pull-pipeline-runner.d.ts +0 -27
- package/packages/core/dist/core/sync/sync-pull-pipeline-runner.d.ts.map +0 -1
- package/packages/core/dist/core/sync/sync-push-delete-executor.d.ts +0 -21
- package/packages/core/dist/core/sync/sync-push-delete-executor.d.ts.map +0 -1
- package/packages/core/dist/core/sync/sync-push-executor.d.ts +0 -24
- package/packages/core/dist/core/sync/sync-push-executor.d.ts.map +0 -1
- package/packages/core/dist/core/sync/sync-remove-executor.d.ts +0 -20
- package/packages/core/dist/core/sync/sync-remove-executor.d.ts.map +0 -1
- package/packages/core/dist/core/sync/sync-result-reporter.d.ts +0 -23
- package/packages/core/dist/core/sync/sync-result-reporter.d.ts.map +0 -1
- package/packages/core/dist/core/sync/sync-source-scanner.d.ts +0 -25
- package/packages/core/dist/core/sync/sync-source-scanner.d.ts.map +0 -1
- package/packages/core/dist/core/sync/sync-status-classifier.d.ts +0 -20
- package/packages/core/dist/core/sync/sync-status-classifier.d.ts.map +0 -1
- package/packages/core/dist/core/sync/sync-types.d.ts +0 -100
- package/packages/core/dist/core/sync/sync-types.d.ts.map +0 -1
- package/packages/core/dist/core/sync/sync-version-checker.d.ts +0 -75
- package/packages/core/dist/core/sync/sync-version-checker.d.ts.map +0 -1
- package/packages/core/dist/core/sync/sync-version-resolver.d.ts +0 -24
- package/packages/core/dist/core/sync/sync-version-resolver.d.ts.map +0 -1
- package/packages/core/dist/core/sync/sync-workspace-deleted-detector.d.ts +0 -18
- package/packages/core/dist/core/sync/sync-workspace-deleted-detector.d.ts.map +0 -1
- package/packages/core/dist/core/telemetry.d.ts +0 -59
- package/packages/core/dist/core/telemetry.d.ts.map +0 -1
- package/packages/core/dist/core/uninstall/direct-uninstall-flow.d.ts +0 -41
- package/packages/core/dist/core/uninstall/direct-uninstall-flow.d.ts.map +0 -1
- package/packages/core/dist/core/uninstall/flow-aware-uninstaller.d.ts +0 -34
- package/packages/core/dist/core/uninstall/flow-aware-uninstaller.d.ts.map +0 -1
- package/packages/core/dist/core/uninstall/uninstall-executor.d.ts +0 -15
- package/packages/core/dist/core/uninstall/uninstall-executor.d.ts.map +0 -1
- package/packages/core/dist/core/uninstall/uninstall-pipeline.d.ts +0 -8
- package/packages/core/dist/core/uninstall/uninstall-pipeline.d.ts.map +0 -1
- package/packages/core/dist/core/uninstall/uninstall-reporter.d.ts +0 -26
- package/packages/core/dist/core/uninstall/uninstall-reporter.d.ts.map +0 -1
- package/packages/core/dist/core/uninstall/workspace-resource-collector.d.ts +0 -68
- package/packages/core/dist/core/uninstall/workspace-resource-collector.d.ts.map +0 -1
- package/packages/core/dist/core/universal-patterns.d.ts +0 -26
- package/packages/core/dist/core/universal-patterns.d.ts.map +0 -1
- package/packages/core/dist/core/unpublish/interactive-unpublish-flow.d.ts +0 -33
- package/packages/core/dist/core/unpublish/interactive-unpublish-flow.d.ts.map +0 -1
- package/packages/core/dist/core/unpublish/local-unpublish-pipeline.d.ts +0 -8
- package/packages/core/dist/core/unpublish/local-unpublish-pipeline.d.ts.map +0 -1
- package/packages/core/dist/core/unpublish/unpublish-output.d.ts +0 -7
- package/packages/core/dist/core/unpublish/unpublish-output.d.ts.map +0 -1
- package/packages/core/dist/core/unpublish/unpublish-pipeline.d.ts +0 -6
- package/packages/core/dist/core/unpublish/unpublish-pipeline.d.ts.map +0 -1
- package/packages/core/dist/core/unpublish/unpublish-types.d.ts +0 -17
- package/packages/core/dist/core/unpublish/unpublish-types.d.ts.map +0 -1
- package/packages/core/dist/core/view/view-pipeline.d.ts +0 -76
- package/packages/core/dist/core/view/view-pipeline.d.ts.map +0 -1
- package/packages/core/dist/core/view/view-printers.d.ts +0 -21
- package/packages/core/dist/core/view/view-printers.d.ts.map +0 -1
- package/packages/core/dist/core/workspace-package-context.d.ts +0 -54
- package/packages/core/dist/core/workspace-package-context.d.ts.map +0 -1
- package/packages/core/dist/generated/version.d.ts +0 -2
- package/packages/core/dist/generated/version.d.ts.map +0 -1
- package/packages/core/dist/index.d.ts +0 -51
- package/packages/core/dist/index.d.ts.map +0 -1
- package/packages/core/dist/types/api.d.ts +0 -47
- package/packages/core/dist/types/api.d.ts.map +0 -1
- package/packages/core/dist/types/conversion-context.d.ts +0 -175
- package/packages/core/dist/types/conversion-context.d.ts.map +0 -1
- package/packages/core/dist/types/execution-context.d.ts +0 -152
- package/packages/core/dist/types/execution-context.d.ts.map +0 -1
- package/packages/core/dist/types/flows.d.ts +0 -370
- package/packages/core/dist/types/flows.d.ts.map +0 -1
- package/packages/core/dist/types/index.d.ts +0 -273
- package/packages/core/dist/types/index.d.ts.map +0 -1
- package/packages/core/dist/types/install.d.ts +0 -5
- package/packages/core/dist/types/install.d.ts.map +0 -1
- package/packages/core/dist/types/lockfile.d.ts +0 -26
- package/packages/core/dist/types/lockfile.d.ts.map +0 -1
- package/packages/core/dist/types/platform-flows.d.ts +0 -349
- package/packages/core/dist/types/platform-flows.d.ts.map +0 -1
- package/packages/core/dist/types/platform.d.ts +0 -32
- package/packages/core/dist/types/platform.d.ts.map +0 -1
- package/packages/core/dist/types/resources.d.ts +0 -16
- package/packages/core/dist/types/resources.d.ts.map +0 -1
- package/packages/core/dist/types/workspace-index.d.ts +0 -86
- package/packages/core/dist/types/workspace-index.d.ts.map +0 -1
- package/packages/core/dist/utils/cleanup-empty-parents.d.ts +0 -20
- package/packages/core/dist/utils/cleanup-empty-parents.d.ts.map +0 -1
- package/packages/core/dist/utils/collections.d.ts +0 -6
- package/packages/core/dist/utils/collections.d.ts.map +0 -1
- package/packages/core/dist/utils/concurrency-pool.d.ts +0 -34
- package/packages/core/dist/utils/concurrency-pool.d.ts.map +0 -1
- package/packages/core/dist/utils/custom-path-resolution.d.ts +0 -69
- package/packages/core/dist/utils/custom-path-resolution.d.ts.map +0 -1
- package/packages/core/dist/utils/entity-detector.d.ts +0 -42
- package/packages/core/dist/utils/entity-detector.d.ts.map +0 -1
- package/packages/core/dist/utils/error-reasons.d.ts +0 -9
- package/packages/core/dist/utils/error-reasons.d.ts.map +0 -1
- package/packages/core/dist/utils/errors.d.ts +0 -36
- package/packages/core/dist/utils/errors.d.ts.map +0 -1
- package/packages/core/dist/utils/expand-directory-selections.d.ts +0 -37
- package/packages/core/dist/utils/expand-directory-selections.d.ts.map +0 -1
- package/packages/core/dist/utils/file-processing.d.ts +0 -26
- package/packages/core/dist/utils/file-processing.d.ts.map +0 -1
- package/packages/core/dist/utils/file-scanner.d.ts +0 -62
- package/packages/core/dist/utils/file-scanner.d.ts.map +0 -1
- package/packages/core/dist/utils/file-walker.d.ts +0 -69
- package/packages/core/dist/utils/file-walker.d.ts.map +0 -1
- package/packages/core/dist/utils/formatters.d.ts +0 -98
- package/packages/core/dist/utils/formatters.d.ts.map +0 -1
- package/packages/core/dist/utils/fs.d.ts +0 -96
- package/packages/core/dist/utils/fs.d.ts.map +0 -1
- package/packages/core/dist/utils/git-cache.d.ts +0 -91
- package/packages/core/dist/utils/git-cache.d.ts.map +0 -1
- package/packages/core/dist/utils/git-spec.d.ts +0 -19
- package/packages/core/dist/utils/git-spec.d.ts.map +0 -1
- package/packages/core/dist/utils/git-url-detection.d.ts +0 -92
- package/packages/core/dist/utils/git-url-detection.d.ts.map +0 -1
- package/packages/core/dist/utils/git-url-parser.d.ts +0 -53
- package/packages/core/dist/utils/git-url-parser.d.ts.map +0 -1
- package/packages/core/dist/utils/hash-utils.d.ts +0 -14
- package/packages/core/dist/utils/hash-utils.d.ts.map +0 -1
- package/packages/core/dist/utils/home-directory.d.ts +0 -41
- package/packages/core/dist/utils/home-directory.d.ts.map +0 -1
- package/packages/core/dist/utils/ini.d.ts +0 -43
- package/packages/core/dist/utils/ini.d.ts.map +0 -1
- package/packages/core/dist/utils/install-error-messages.d.ts +0 -32
- package/packages/core/dist/utils/install-error-messages.d.ts.map +0 -1
- package/packages/core/dist/utils/install-helpers.d.ts +0 -6
- package/packages/core/dist/utils/install-helpers.d.ts.map +0 -1
- package/packages/core/dist/utils/jsonc.d.ts +0 -18
- package/packages/core/dist/utils/jsonc.d.ts.map +0 -1
- package/packages/core/dist/utils/lockfile-validation.d.ts +0 -22
- package/packages/core/dist/utils/lockfile-validation.d.ts.map +0 -1
- package/packages/core/dist/utils/lockfile-yml.d.ts +0 -23
- package/packages/core/dist/utils/lockfile-yml.d.ts.map +0 -1
- package/packages/core/dist/utils/logger.d.ts +0 -19
- package/packages/core/dist/utils/logger.d.ts.map +0 -1
- package/packages/core/dist/utils/manifest-paths.d.ts +0 -10
- package/packages/core/dist/utils/manifest-paths.d.ts.map +0 -1
- package/packages/core/dist/utils/match-path.d.ts +0 -10
- package/packages/core/dist/utils/match-path.d.ts.map +0 -1
- package/packages/core/dist/utils/name-version-parser.d.ts +0 -20
- package/packages/core/dist/utils/name-version-parser.d.ts.map +0 -1
- package/packages/core/dist/utils/package-copy.d.ts +0 -40
- package/packages/core/dist/utils/package-copy.d.ts.map +0 -1
- package/packages/core/dist/utils/package-filters.d.ts +0 -2
- package/packages/core/dist/utils/package-filters.d.ts.map +0 -1
- package/packages/core/dist/utils/package-index-yml.d.ts +0 -26
- package/packages/core/dist/utils/package-index-yml.d.ts.map +0 -1
- package/packages/core/dist/utils/package-merge.d.ts +0 -13
- package/packages/core/dist/utils/package-merge.d.ts.map +0 -1
- package/packages/core/dist/utils/package-name.d.ts +0 -84
- package/packages/core/dist/utils/package-name.d.ts.map +0 -1
- package/packages/core/dist/utils/package-yml.d.ts +0 -11
- package/packages/core/dist/utils/package-yml.d.ts.map +0 -1
- package/packages/core/dist/utils/package.d.ts +0 -5
- package/packages/core/dist/utils/package.d.ts.map +0 -1
- package/packages/core/dist/utils/path-comparison.d.ts +0 -52
- package/packages/core/dist/utils/path-comparison.d.ts.map +0 -1
- package/packages/core/dist/utils/path-normalization.d.ts +0 -54
- package/packages/core/dist/utils/path-normalization.d.ts.map +0 -1
- package/packages/core/dist/utils/path-resolution.d.ts +0 -36
- package/packages/core/dist/utils/path-resolution.d.ts.map +0 -1
- package/packages/core/dist/utils/paths.d.ts +0 -35
- package/packages/core/dist/utils/paths.d.ts.map +0 -1
- package/packages/core/dist/utils/pattern-matcher.d.ts +0 -66
- package/packages/core/dist/utils/pattern-matcher.d.ts.map +0 -1
- package/packages/core/dist/utils/platform-yaml-merge.d.ts +0 -6
- package/packages/core/dist/utils/platform-yaml-merge.d.ts.map +0 -1
- package/packages/core/dist/utils/plugin-naming.d.ts +0 -179
- package/packages/core/dist/utils/plugin-naming.d.ts.map +0 -1
- package/packages/core/dist/utils/qualified-name.d.ts +0 -41
- package/packages/core/dist/utils/qualified-name.d.ts.map +0 -1
- package/packages/core/dist/utils/resolution-mode.d.ts +0 -10
- package/packages/core/dist/utils/resolution-mode.d.ts.map +0 -1
- package/packages/core/dist/utils/resource-arg-parser.d.ts +0 -48
- package/packages/core/dist/utils/resource-arg-parser.d.ts.map +0 -1
- package/packages/core/dist/utils/root-file-extractor.d.ts +0 -55
- package/packages/core/dist/utils/root-file-extractor.d.ts.map +0 -1
- package/packages/core/dist/utils/root-file-merger.d.ts +0 -15
- package/packages/core/dist/utils/root-file-merger.d.ts.map +0 -1
- package/packages/core/dist/utils/source-operation-arguments.d.ts +0 -48
- package/packages/core/dist/utils/source-operation-arguments.d.ts.map +0 -1
- package/packages/core/dist/utils/tarball.d.ts +0 -30
- package/packages/core/dist/utils/tarball.d.ts.map +0 -1
- package/packages/core/dist/utils/validation/dependency-containment.d.ts +0 -37
- package/packages/core/dist/utils/validation/dependency-containment.d.ts.map +0 -1
- package/packages/core/dist/utils/validation/index.d.ts +0 -4
- package/packages/core/dist/utils/validation/index.d.ts.map +0 -1
- package/packages/core/dist/utils/validation/manifest.d.ts +0 -16
- package/packages/core/dist/utils/validation/manifest.d.ts.map +0 -1
- package/packages/core/dist/utils/validation/package-files.d.ts +0 -13
- package/packages/core/dist/utils/validation/package-files.d.ts.map +0 -1
- package/packages/core/dist/utils/validation/version.d.ts +0 -24
- package/packages/core/dist/utils/validation/version.d.ts.map +0 -1
- package/packages/core/dist/utils/version-generator.d.ts +0 -58
- package/packages/core/dist/utils/version-generator.d.ts.map +0 -1
- package/packages/core/dist/utils/version-ranges.d.ts +0 -88
- package/packages/core/dist/utils/version-ranges.d.ts.map +0 -1
- package/packages/core/dist/utils/workspace-index-healer.d.ts +0 -35
- package/packages/core/dist/utils/workspace-index-healer.d.ts.map +0 -1
- package/packages/core/dist/utils/workspace-index-helpers.d.ts +0 -36
- package/packages/core/dist/utils/workspace-index-helpers.d.ts.map +0 -1
- package/packages/core/dist/utils/workspace-index-ownership.d.ts +0 -32
- package/packages/core/dist/utils/workspace-index-ownership.d.ts.map +0 -1
- package/packages/core/dist/utils/workspace-index-yml.d.ts +0 -9
- package/packages/core/dist/utils/workspace-index-yml.d.ts.map +0 -1
- package/packages/core/dist/utils/workspace-package-context.d.ts +0 -6
- package/packages/core/dist/utils/workspace-package-context.d.ts.map +0 -1
- package/packages/core/package.json +0 -96
- package/packages/gui/package.json +0 -24
- package/packages/gui/src-tauri/Cargo.toml +0 -14
- package/plans/wave-resolver.md +0 -254
- package/specs/README.md +0 -89
- package/specs/add/README.md +0 -232
- package/specs/agents-claude.md +0 -570
- package/specs/agents-frontmatter.md +0 -967
- package/specs/agents-opencode.md +0 -622
- package/specs/architecture.md +0 -102
- package/specs/auth/README.md +0 -17
- package/specs/auth/auth-http-contract.md +0 -25
- package/specs/auth/cli/credentials.md +0 -39
- package/specs/auth/cli/login.md +0 -32
- package/specs/auth/cli/logout.md +0 -16
- package/specs/claude-mcp.md +0 -1065
- package/specs/claude-plugins-marketplace-creation.md +0 -564
- package/specs/claude-plugins-marketplace.md +0 -363
- package/specs/claude-plugins.md +0 -413
- package/specs/claude-settings.md +0 -945
- package/specs/cli-options.md +0 -134
- package/specs/codex-mcp.md +0 -114
- package/specs/commands-overview.md +0 -184
- package/specs/directory-layout.md +0 -95
- package/specs/install/README.md +0 -35
- package/specs/install/git-cache.md +0 -403
- package/specs/install/git-sources.md +0 -401
- package/specs/install/install-behavior.md +0 -776
- package/specs/install/marketplace-installation.md +0 -839
- package/specs/install/package-yml-canonical.md +0 -247
- package/specs/install/plugin-installation.md +0 -642
- package/specs/install/plugin-source-normalization.md +0 -773
- package/specs/install/version-resolution.md +0 -270
- package/specs/list/README.md +0 -38
- package/specs/new/README.md +0 -618
- package/specs/new/SUMMARY.md +0 -284
- package/specs/new/scope-behavior.md +0 -791
- package/specs/opencode-tools.md +0 -345
- package/specs/package/README.md +0 -61
- package/specs/package/nested-packages-and-parent-packages.md +0 -80
- package/specs/package/package-index-yml.md +0 -162
- package/specs/package/package-root-layout.md +0 -96
- package/specs/package/registry-payload-and-copy.md +0 -83
- package/specs/package/universal-content.md +0 -121
- package/specs/package-sources.md +0 -266
- package/specs/platforms/README.md +0 -52
- package/specs/platforms/configuration.md +0 -571
- package/specs/platforms/detection.md +0 -552
- package/specs/platforms/directory-layout.md +0 -599
- package/specs/platforms/examples.md +0 -1146
- package/specs/platforms/flow-reference.md +0 -1343
- package/specs/platforms/flows.md +0 -1931
- package/specs/platforms/map-pipeline.md +0 -801
- package/specs/platforms/overview.md +0 -349
- package/specs/platforms/specification.md +0 -700
- package/specs/platforms/troubleshooting.md +0 -659
- package/specs/platforms/universal-converter.md +0 -520
- package/specs/publish/README.md +0 -397
- package/specs/registry.md +0 -105
- package/specs/remove/README.md +0 -338
- package/specs/save/README.md +0 -189
- package/specs/scope-management.md +0 -92
- package/specs/set/README.md +0 -523
- package/specs/set/set-behavior.md +0 -563
- package/specs/uninstall/README.md +0 -250
- package/turbo.json +0 -15
- /package/packages/cli/dist/{add-GJYCLX7R.js.map → add-S3TXZ7PY.js.map} +0 -0
- /package/packages/cli/dist/{chunk-Q4L55OR7.js.map → chunk-34ABTOH6.js.map} +0 -0
- /package/packages/cli/dist/{chunk-TMBWLJM6.js.map → chunk-3H2IZLIF.js.map} +0 -0
- /package/packages/cli/dist/{chunk-JEO6LVPR.js.map → chunk-5FE6RV3U.js.map} +0 -0
- /package/packages/cli/dist/{chunk-FKPRQFGS.js.map → chunk-74ECD7IG.js.map} +0 -0
- /package/packages/cli/dist/{chunk-623RDPFN.js.map → chunk-7Y5YLMSY.js.map} +0 -0
- /package/packages/cli/dist/{chunk-3TR4FXOO.js.map → chunk-CFG5HDE3.js.map} +0 -0
- /package/packages/cli/dist/{chunk-6LGM6BXC.js.map → chunk-HBZNMQQR.js.map} +0 -0
- /package/packages/cli/dist/{chunk-L5PUJVGZ.js.map → chunk-JX4Y6QJH.js.map} +0 -0
- /package/packages/cli/dist/{chunk-4XPF676E.js.map → chunk-KFTWXB3Y.js.map} +0 -0
- /package/packages/cli/dist/{chunk-V3QNOLM2.js.map → chunk-MXXYQE3B.js.map} +0 -0
- /package/packages/cli/dist/{chunk-GKCVGADO.js.map → chunk-NYRU3BHW.js.map} +0 -0
- /package/packages/cli/dist/{chunk-VGBYUNXA.js.map → chunk-PHIA5RBP.js.map} +0 -0
- /package/packages/cli/dist/{chunk-73II4CTT.js.map → chunk-RRHHWPZW.js.map} +0 -0
- /package/packages/cli/dist/{chunk-HEFHYKEV.js.map → chunk-ZGS7FV4C.js.map} +0 -0
- /package/packages/cli/dist/{default-S2KVP6GL.js.map → default-U2DJBXF5.js.map} +0 -0
- /package/packages/cli/dist/{fork-package-Q5PTK4VH.js.map → fork-package-RVF3K3UT.js.map} +0 -0
- /package/packages/cli/dist/{install-V4QUIYEF.js.map → install-ZSAOUP2Z.js.map} +0 -0
- /package/packages/cli/dist/{list-YLJXUNOA.js.map → list-RPO3EAV4.js.map} +0 -0
- /package/packages/cli/dist/{login-3JZGY7BL.js.map → login-EPUMFFAE.js.map} +0 -0
- /package/packages/cli/dist/{logout-NY7ZZCWN.js.map → logout-L255ARWR.js.map} +0 -0
- /package/packages/cli/dist/{move-XEQ7AIFY.js.map → move-N5DTFIGV.js.map} +0 -0
- /package/packages/cli/dist/{new-WBA3L5FE.js.map → new-C52XLJRT.js.map} +0 -0
- /package/packages/cli/dist/{publish-4PYKKKS7.js.map → publish-VKYJBIA5.js.map} +0 -0
- /package/packages/cli/dist/{remove-KO6WX53L.js.map → remove-TM77NHV3.js.map} +0 -0
- /package/packages/cli/dist/{resolve-named-dependency-UCMYGP7G.js.map → resolve-named-dependency-SUDVLL5Y.js.map} +0 -0
- /package/packages/cli/dist/{resource-discoverer-57TCCJDN.js.map → resource-discoverer-MGVEBFMJ.js.map} +0 -0
- /package/packages/cli/dist/{sync-ZHKH6KH3.js.map → sync-WFKBS4L2.js.map} +0 -0
- /package/packages/cli/dist/{uninstall-2C6KORSJ.js.map → uninstall-STONXT6H.js.map} +0 -0
- /package/packages/cli/dist/{view-BDBG4IP6.js.map → view-Y52V6DUL.js.map} +0 -0
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
{
|
|
2
|
+
"version": 3,
|
|
3
|
+
"sources": ["../../core/src/core/install/resource-discoverer.ts", "../../core/src/utils/file-walker.ts", "../../core/src/core/resources/markdown-metadata.ts", "../../core/src/core/resources/resource-naming.ts", "../../core/src/core/install/plugin-detector.ts", "../../core/src/core/install/marketplace-handler.ts", "../../core/src/core/install/unified/context-builders.ts", "../../core/src/core/install/package-input.ts", "../../core/src/utils/git-url-detection.ts", "../../core/src/core/package-management.ts", "../../core/src/utils/version-ranges.ts", "../../core/src/utils/version-generator.ts", "../../core/src/core/package-versioning.ts", "../../core/src/core/package.ts", "../../core/src/core/install/plugin-transformer.ts", "../../core/src/core/conversion-context/creation.ts", "../../core/src/core/conversion-context/serialization.ts", "../../core/src/core/install/format-detector.ts", "../../core/src/core/install/plugin-metadata-resolver.ts", "../../core/src/core/install/path-package-loader.ts", "../../core/src/utils/tarball.ts", "../../core/src/core/install/sources/base.ts", "../../core/src/core/install/local-source-resolution.ts", "../../core/src/core/source-resolution/resolve-registry-version.ts", "../../core/src/core/install/version-selection.ts", "../../core/src/core/remote-pull.ts", "../../core/src/utils/manifest-paths.ts", "../../core/src/core/platform/platform-utils.ts", "../../core/src/core/platform/registry-entry-filter.ts", "../../core/src/utils/package-merge.ts", "../../core/src/core/cache-manager.ts", "../../core/src/utils/git-cache.ts", "../../core/src/utils/error-reasons.ts", "../../core/src/core/install/remote-reporting.ts", "../../core/src/core/scoping/package-scoping.ts", "../../core/src/core/install/sources/registry-source.ts", "../../core/src/core/install/sources/path-source.ts", "../../core/src/core/install/base-detector.ts", "../../core/src/utils/pattern-matcher.ts", "../../core/src/core/install/plugin-sources.ts", 
"../../core/src/utils/install-error-messages.ts", "../../core/src/core/git-clone.ts", "../../core/src/core/install/git-package-loader.ts", "../../core/src/core/install/sources/git-source.ts", "../../core/src/utils/workspace-index-yml.ts", "../../core/src/core/install/sources/workspace-source.ts", "../../core/src/core/install/sources/loader-factory.ts", "../../core/src/core/install/unified/context-helpers.ts", "../../core/src/core/install/preprocessing/base-resolver.ts", "../../core/src/core/install/unified/phases/load-package.ts", "../../core/src/core/install/import-flow-converter.ts", "../../core/src/core/flows/map-pipeline/utils.ts", "../../core/src/core/flows/map-pipeline/context.ts", "../../core/src/core/flows/map-pipeline/operations/set.ts", "../../core/src/core/flows/map-pipeline/operations/rename.ts", "../../core/src/core/flows/map-pipeline/operations/unset.ts", "../../core/src/core/flows/map-pipeline/operations/switch.ts", "../../core/src/core/flows/map-pipeline/operations/transform.ts", "../../core/src/core/flows/map-pipeline/operations/copy.ts", "../../core/src/core/flows/map-pipeline/operations/pipe.ts", "../../core/src/core/flows/map-pipeline/index.ts", "../../core/src/core/flows/flow-transforms.ts", "../../core/src/core/flows/markdown.ts", "../../core/src/core/glob-target-mapping.ts", "../../core/src/core/install/format-group-merger.ts", "../../core/src/core/install/conversion-context.ts", "../../core/src/core/install/conversion-coordinator.ts", "../../core/src/core/install/unified/phases/convert.ts", "../../core/src/utils/match-path.ts", "../../core/src/core/install/strategies/helpers/temp-directory.ts", "../../core/src/core/install/operations/conflict-handler.ts", "../../core/src/core/interaction-policy.ts", "../../core/src/core/openpackage.ts", "../../core/src/core/install/install-helpers.ts", "../../core/src/core/install/unified/phases/conflicts.ts", "../../core/src/core/platform/platform-root-files.ts", 
"../../core/src/core/install/helpers/file-discovery.ts", "../../core/src/core/install/operations/root-files.ts", "../../core/src/utils/root-file-extractor.ts", "../../core/src/utils/root-file-merger.ts", "../../core/src/core/install/flow-index-installer.ts", "../../core/src/utils/lockfile-yml.ts", "../../core/src/utils/collections.ts", "../../core/src/core/install/strategies/conversion-strategy.ts", "../../core/src/core/install/strategies/helpers/result-converter.ts", "../../core/src/core/install/strategies/helpers/flow-helpers.ts", "../../core/src/core/install/helpers/result-logging.ts", "../../core/src/core/flows/flow-execution-coordinator.ts", "../../core/src/core/flows/flow-executor.ts", "../../core/src/utils/hash-utils.ts", "../../core/src/core/platform-yaml-merge.ts", "../../core/src/core/flows/flow-key-mapper.ts", "../../core/src/core/flows/flow-key-extractor.ts", "../../core/src/core/flows/source-resolver.ts", "../../core/src/core/flows/source-schema-validator.ts", "../../core/src/core/resources/resource-query.ts", "../../core/src/core/list/untracked-files-scanner.ts", "../../core/src/core/resources/resource-namespace.ts", "../../core/src/core/resources/resource-classifier.ts", "../../core/src/core/resources/resource-builder.ts", "../../core/src/core/resources/resource-resolver.ts", "../../core/src/core/resources/scope-traversal.ts", "../../core/src/core/resources/disambiguation-prompt.ts", "../../core/src/core/resources/resource-spec.ts", "../../core/src/utils/workspace-index-helpers.ts", "../../core/src/core/install/strategies/helpers/platform-filtering.ts", "../../core/src/core/flows/import-pipeline.ts", "../../core/src/core/install/strategies/base-strategy.ts", "../../core/src/core/flows/platform-converter.ts", "../../core/src/core/install/strategies/flow-based-strategy.ts", "../../core/src/core/install/conflicts/file-conflict-resolver.ts", "../../core/src/utils/package-index-yml.ts", "../../core/src/core/install/conflicts/namespace-path.ts", 
"../../core/src/core/install/index-based-installer.ts", "../../core/src/core/install/resource-pattern-normalization.ts", "../../core/src/core/install/strategies/strategy-selector.ts", "../../core/src/core/install/helpers/format-detection.ts", "../../core/src/core/install/flow-based-installer.ts", "../../core/src/core/install/stale-file-cleanup.ts", "../../core/src/core/uninstall/flow-aware-uninstaller.ts", "../../core/src/utils/cleanup-empty-parents.ts", "../../core/src/core/platform/directory-preservation.ts", "../../core/src/core/install/helpers/result-aggregation.ts", "../../core/src/core/install/operations/installation-executor.ts", "../../core/src/core/install/platform-resolution.ts", "../../core/src/core/install/package-installation.ts", "../../core/src/core/install/unified/phases/execute.ts", "../../core/src/core/install/unified/phases/manifest.ts", "../../core/src/core/install/install-reporting.ts", "../../core/src/core/install/unified/phases/report.ts", "../../core/src/core/uninstall/uninstall-pipeline.ts", "../../core/src/utils/workspace-index-healer.ts", "../../core/src/utils/workspace-index-ownership.ts", "../../core/src/core/platform/root-file-uninstaller.ts", "../../core/src/core/install/orchestrator/subsumption-resolver.ts", "../../core/src/core/sync/sync-version-checker.ts", "../../core/src/core/sync/sync-version-resolver.ts", "../../core/src/core/install/unified/pipeline.ts", "../../core/src/core/install/unified/multi-context-pipeline.ts", "../../core/src/core/install/preprocessing/convenience-preprocessor.ts", "../../core/src/core/install/convenience-matchers.ts", "../../core/src/core/install/resource-search.ts", "../../core/src/core/install/resource-selection-menu.ts"],
|
|
4
|
+
"sourcesContent": ["/**\n * Resource Discovery Module\n * \n * Discovers all installable resources within a package/plugin:\n * - Agents (agents/.../*.md)\n * - Skills (skills/.../ directories with SKILL.md)\n * - Commands (commands/.../*.md)\n * - Rules (rules/.../*.md)\n * - Hooks (hooks/.../)\n * - MCP servers (mcp.jsonc, mcp.json)\n */\n\nimport { join, basename, dirname, relative, resolve } from 'path';\nimport { walkFiles } from '../../utils/file-walker.js';\nimport { exists, readTextFile } from '../../utils/fs.js';\nimport { logger } from '../../utils/logger.js';\nimport { extractMarkdownResourceMetadata } from '../resources/markdown-metadata.js';\nimport { defaultNameFromPath, defaultNameFromSkillDir, preferFrontmatterName } from '../resources/resource-naming.js';\nimport { isMarkerFile } from '../resources/resource-registry.js';\nimport { loadMarketplaceManifest } from './plugin-detector.js';\nimport type { MarketplaceManifest } from './marketplace-handler.js';\nimport { DIR_PATTERNS, FILE_PATTERNS } from '../../constants/index.js';\nimport { findMarkerResourceFiles } from './resource-search.js';\nimport type {\n DiscoveredResource,\n ResourceDiscoveryResult,\n ResourceType\n} from './resource-types.js';\n\n/**\n * Discover all installable resources within a package\n * \n * @param basePath - Base path to search from (detectedBase or contentRoot)\n * @param repoRoot - Repository root path for relative path calculation\n * @returns Discovery result with all found resources\n */\nexport async function discoverResources(\n basePath: string,\n repoRoot: string\n): Promise<ResourceDiscoveryResult> {\n logger.debug('Discovering resources', { basePath, repoRoot });\n \n const basePathResolved = resolve(basePath);\n const repoRootResolved = resolve(repoRoot);\n \n const allResources: DiscoveredResource[] = [];\n \n // Discover each resource type\n const agents = await discoverAgents(basePathResolved, repoRootResolved);\n const skills = await 
discoverSkills(basePathResolved, repoRootResolved);\n const commands = await discoverCommands(basePathResolved, repoRootResolved);\n const rules = await discoverRules(basePathResolved, repoRootResolved);\n const hooks = await discoverHooks(basePathResolved, repoRootResolved);\n const mcp = await discoverMCP(basePathResolved, repoRootResolved);\n const { plugins, manifest: marketplaceManifest } = await discoverPlugins(basePathResolved);\n\n allResources.push(...agents, ...skills, ...commands, ...rules, ...hooks, ...mcp, ...plugins);\n \n // Group by type\n const byType = new Map<ResourceType, DiscoveredResource[]>();\n for (const resource of allResources) {\n const existing = byType.get(resource.resourceType) || [];\n existing.push(resource);\n byType.set(resource.resourceType, existing);\n }\n \n logger.info('Resource discovery complete', {\n total: allResources.length,\n agents: agents.length,\n skills: skills.length,\n commands: commands.length,\n rules: rules.length,\n hooks: hooks.length,\n mcp: mcp.length,\n plugins: plugins.length\n });\n \n return {\n all: allResources,\n byType,\n total: allResources.length,\n basePath: basePathResolved,\n repoRoot: repoRootResolved,\n marketplaceManifest\n };\n}\n\n/**\n * Discover agents (agents/.../*.md)\n */\nasync function discoverAgents(\n basePath: string,\n repoRoot: string\n): Promise<DiscoveredResource[]> {\n const resources: DiscoveredResource[] = [];\n const agentsDir = join(basePath, 'agents');\n \n if (!(await exists(agentsDir))) {\n return resources;\n }\n \n for await (const file of walkFiles(agentsDir)) {\n if (!file.endsWith('.md')) {\n continue;\n }\n \n const content = await readTextFile(file);\n const metadata = extractMarkdownResourceMetadata(content);\n const resourcePath = normalizeResourcePath(file, repoRoot);\n \n resources.push({\n resourceType: 'agent',\n resourcePath,\n displayName: preferFrontmatterName(metadata.name, defaultNameFromPath(file)),\n description: metadata.description,\n version: 
metadata.version,\n filePath: file,\n installKind: 'file',\n matchedBy: metadata.name ? 'frontmatter' : 'filename'\n });\n }\n \n return resources;\n}\n\n/**\n * Discover skills (skills/.../ directories with marker file)\n */\nasync function discoverSkills(\n basePath: string,\n repoRoot: string\n): Promise<DiscoveredResource[]> {\n const resources: DiscoveredResource[] = [];\n const skillFiles = await findMarkerResourceFiles(basePath, 'skill');\n\n for (const file of skillFiles) {\n if (!isMarkerFile(basename(file), 'skill')) {\n continue;\n }\n\n const skillDir = dirname(file);\n const content = await readTextFile(file);\n const metadata = extractMarkdownResourceMetadata(content);\n const resourcePath = normalizeResourcePath(skillDir, repoRoot);\n \n resources.push({\n resourceType: 'skill',\n resourcePath,\n displayName: preferFrontmatterName(metadata.name, defaultNameFromSkillDir(skillDir)),\n description: metadata.description,\n version: metadata.version,\n filePath: skillDir,\n installKind: 'directory',\n matchedBy: metadata.name ? 'frontmatter' : 'dirname'\n });\n }\n \n return resources;\n}\n\n/**\n * Discover commands (commands/.../*.md)\n */\nasync function discoverCommands(\n basePath: string,\n repoRoot: string\n): Promise<DiscoveredResource[]> {\n const resources: DiscoveredResource[] = [];\n const commandsDir = join(basePath, 'commands');\n \n if (!(await exists(commandsDir))) {\n return resources;\n }\n \n for await (const file of walkFiles(commandsDir)) {\n if (!file.endsWith('.md')) {\n continue;\n }\n \n const content = await readTextFile(file);\n const metadata = extractMarkdownResourceMetadata(content);\n const resourcePath = normalizeResourcePath(file, repoRoot);\n \n resources.push({\n resourceType: 'command',\n resourcePath,\n displayName: preferFrontmatterName(metadata.name, defaultNameFromPath(file)),\n description: metadata.description,\n version: metadata.version,\n filePath: file,\n installKind: 'file',\n matchedBy: metadata.name ? 
'frontmatter' : 'filename'\n });\n }\n \n return resources;\n}\n\n/**\n * Discover rules (rules/.../*.md)\n */\nasync function discoverRules(\n basePath: string,\n repoRoot: string\n): Promise<DiscoveredResource[]> {\n const resources: DiscoveredResource[] = [];\n const rulesDir = join(basePath, 'rules');\n \n if (!(await exists(rulesDir))) {\n return resources;\n }\n \n for await (const file of walkFiles(rulesDir)) {\n if (!file.endsWith('.md')) {\n continue;\n }\n \n const content = await readTextFile(file);\n const metadata = extractMarkdownResourceMetadata(content);\n const resourcePath = normalizeResourcePath(file, repoRoot);\n \n resources.push({\n resourceType: 'rule',\n resourcePath,\n displayName: preferFrontmatterName(metadata.name, defaultNameFromPath(file)),\n description: metadata.description,\n version: metadata.version,\n filePath: file,\n installKind: 'file',\n matchedBy: metadata.name ? 'frontmatter' : 'filename'\n });\n }\n \n return resources;\n}\n\n/**\n * Discover hooks (hooks/.../)\n */\nasync function discoverHooks(\n basePath: string,\n repoRoot: string\n): Promise<DiscoveredResource[]> {\n const resources: DiscoveredResource[] = [];\n const hooksDir = join(basePath, 'hooks');\n \n if (!(await exists(hooksDir))) {\n return resources;\n }\n \n // Discover hook files or directories\n for await (const file of walkFiles(hooksDir)) {\n const resourcePath = normalizeResourcePath(file, repoRoot);\n const displayName = basename(file);\n \n resources.push({\n resourceType: 'hook',\n resourcePath,\n displayName,\n filePath: file,\n installKind: 'file'\n });\n }\n \n return resources;\n}\n\n/**\n * Discover MCP server configuration files\n */\nasync function discoverMCP(\n basePath: string,\n repoRoot: string\n): Promise<DiscoveredResource[]> {\n const resources: DiscoveredResource[] = [];\n const mcpFiles = ['mcp.jsonc', 'mcp.json'];\n \n for (const filename of mcpFiles) {\n const filePath = join(basePath, filename);\n \n if (await exists(filePath)) 
{\n const resourcePath = normalizeResourcePath(filePath, repoRoot);\n \n resources.push({\n resourceType: 'mcp',\n resourcePath,\n displayName: 'configs',\n description: 'Model Context Protocol server configuration',\n filePath,\n installKind: 'file'\n });\n \n // Only return the first found\n break;\n }\n }\n \n return resources;\n}\n\n/**\n * Discover marketplace plugin entries from .claude-plugin/marketplace.json.\n * Returns the parsed manifest alongside discovered resources so callers\n * can stash it on the discovery result (avoids re-loading later).\n */\nasync function discoverPlugins(\n basePath: string\n): Promise<{ plugins: DiscoveredResource[]; manifest: MarketplaceManifest | null }> {\n const manifest = await loadMarketplaceManifest(basePath);\n if (!manifest) return { plugins: [], manifest: null };\n\n const plugins = manifest.plugins.map(plugin => ({\n resourceType: 'plugin' as ResourceType,\n resourcePath: plugin.name,\n displayName: plugin.name,\n description: plugin.description,\n version: plugin.version,\n filePath: join(basePath, DIR_PATTERNS.CLAUDE_PLUGIN, FILE_PATTERNS.MARKETPLACE_JSON),\n installKind: 'plugin' as const,\n }));\n\n return { plugins, manifest };\n}\n\n/**\n * Normalize resource path to be relative to repository root\n */\nfunction normalizeResourcePath(\n absolutePath: string,\n repoRoot: string\n): string {\n const rel = relative(repoRoot, absolutePath);\n return rel.replace(/\\\\/g, '/').replace(/^\\.\\//, '');\n}\n", "/**\n * File Walker Utility\n * \n * Provides efficient file system traversal utilities for walking directory trees.\n * Used across install, save, status, pack, and other commands.\n */\n\nimport { promises as fs } from 'fs';\nimport { join } from 'path';\n\n/**\n * Filter predicate for file walking\n */\nexport type FileFilter = (path: string, isDirectory: boolean) => boolean | Promise<boolean>;\n\n/**\n * Options for file walking\n */\nexport interface WalkOptions {\n /**\n * Filter predicate to 
include/exclude files and directories\n */\n filter?: FileFilter;\n \n /**\n * Follow symbolic links (default: false)\n */\n followSymlinks?: boolean;\n \n /**\n * Maximum depth to traverse (default: unlimited)\n */\n maxDepth?: number;\n \n /**\n * Include directories in results (default: false, only files)\n */\n includeDirs?: boolean;\n}\n\n/**\n * Async generator that walks a directory tree and yields file paths\n * \n * @param dir - Directory to walk\n * @param options - Walking options\n * \n * @example\n * for await (const filePath of walkFiles('/path/to/dir')) {\n * console.log(filePath);\n * }\n */\nexport async function* walkFiles(\n dir: string,\n options: WalkOptions = {}\n): AsyncGenerator<string> {\n const {\n filter,\n followSymlinks = false,\n maxDepth = Infinity,\n includeDirs = false\n } = options;\n \n yield* walkFilesInternal(dir, filter, followSymlinks, maxDepth, includeDirs, 0);\n}\n\n/**\n * Internal recursive walker\n */\nasync function* walkFilesInternal(\n dir: string,\n filter: FileFilter | undefined,\n followSymlinks: boolean,\n maxDepth: number,\n includeDirs: boolean,\n currentDepth: number\n): AsyncGenerator<string> {\n // Check depth limit\n if (currentDepth > maxDepth) {\n return;\n }\n \n try {\n const entries = await fs.readdir(dir, { withFileTypes: true });\n \n for (const entry of entries) {\n const fullPath = join(dir, entry.name);\n \n // Handle symlinks\n let isDirectory = entry.isDirectory();\n let isFile = entry.isFile();\n \n if (entry.isSymbolicLink() && followSymlinks) {\n try {\n const stat = await fs.stat(fullPath);\n isDirectory = stat.isDirectory();\n isFile = stat.isFile();\n } catch {\n // Skip broken symlinks\n continue;\n }\n } else if (entry.isSymbolicLink()) {\n // Skip symlinks if not following\n continue;\n }\n \n // Apply filter\n if (filter) {\n const shouldInclude = await filter(fullPath, isDirectory);\n if (!shouldInclude) {\n continue;\n }\n }\n \n // Yield directories if requested\n if (isDirectory && 
includeDirs) {\n yield fullPath;\n }\n \n // Recurse into directories\n if (isDirectory) {\n yield* walkFilesInternal(\n fullPath,\n filter,\n followSymlinks,\n maxDepth,\n includeDirs,\n currentDepth + 1\n );\n } else if (isFile) {\n // Yield files\n yield fullPath;\n }\n }\n } catch (error) {\n // Ignore permission errors and continue\n if ((error as NodeJS.ErrnoException).code !== 'EACCES' && \n (error as NodeJS.ErrnoException).code !== 'EPERM') {\n throw error;\n }\n }\n}\n\n/**\n * Walk directory and collect all files into an array\n * \n * @param dir - Directory to walk\n * @param options - Walking options\n * @returns Array of file paths\n */\nexport async function collectFiles(\n dir: string,\n options: WalkOptions = {}\n): Promise<string[]> {\n const files: string[] = [];\n \n for await (const filePath of walkFiles(dir, options)) {\n files.push(filePath);\n }\n \n return files;\n}\n\n/**\n * Walk directory with a simple include/exclude pattern filter\n * \n * @param dir - Directory to walk\n * @param includePatterns - Patterns to include (minimatch)\n * @param excludePatterns - Patterns to exclude (minimatch)\n * @returns Async generator of matching file paths\n */\nexport async function* walkWithPatterns(\n dir: string,\n includePatterns: string[] = ['**/*'],\n excludePatterns: string[] = []\n): AsyncGenerator<string> {\n const { minimatch } = await import('minimatch');\n const { relative } = await import('path');\n \n const filter: FileFilter = (path: string, isDirectory: boolean) => {\n // Always traverse directories\n if (isDirectory) {\n return true;\n }\n \n const relativePath = relative(dir, path);\n \n // Check exclusions first\n for (const pattern of excludePatterns) {\n if (minimatch(relativePath, pattern, { dot: true })) {\n return false;\n }\n }\n \n // Check inclusions\n for (const pattern of includePatterns) {\n if (minimatch(relativePath, pattern, { dot: true })) {\n return true;\n }\n }\n \n return false;\n };\n \n yield* walkFiles(dir, { 
filter });\n}\n\n/**\n * Count files in a directory (without collecting them all)\n * \n * @param dir - Directory to walk\n * @param options - Walking options\n * @returns Number of files\n */\nexport async function countFiles(\n dir: string,\n options: WalkOptions = {}\n): Promise<number> {\n let count = 0;\n \n for await (const _ of walkFiles(dir, options)) {\n count++;\n }\n \n return count;\n}\n", "import { splitFrontmatter } from '../markdown-frontmatter.js';\n\nexport interface MarkdownResourceMetadata {\n name?: string;\n description?: string;\n version?: string;\n}\n\nexport function extractMarkdownResourceMetadata(content: string): MarkdownResourceMetadata {\n const { frontmatter } = splitFrontmatter(content);\n\n if (!frontmatter || typeof frontmatter !== 'object') {\n return {};\n }\n\n return {\n name: typeof frontmatter.name === 'string' ? frontmatter.name : undefined,\n description: typeof frontmatter.description === 'string' ? frontmatter.description : undefined,\n version: extractVersionFromFrontmatter(frontmatter),\n };\n}\n\nfunction extractVersionFromFrontmatter(frontmatter: any): string | undefined {\n if (!frontmatter || typeof frontmatter !== 'object') {\n return undefined;\n }\n\n const version = frontmatter.version ?? frontmatter.metadata?.version;\n\n if (typeof version === 'string') {\n const trimmed = version.trim();\n return trimmed.length > 0 ? 
trimmed : undefined;\n }\n\n return undefined;\n}\n", "import { basename } from 'path';\n\nexport function stripExtension(filename: string): string {\n return filename.replace(/\\.[^.]+$/, '') || filename;\n}\n\nexport function defaultNameFromPath(filePath: string): string {\n return stripExtension(basename(filePath));\n}\n\nexport function defaultNameFromSkillDir(dirPath: string): string {\n return basename(dirPath);\n}\n\nexport function preferFrontmatterName(\n frontmatterName: string | undefined,\n fallbackName: string\n): string {\n return frontmatterName && frontmatterName.trim().length > 0\n ? frontmatterName\n : fallbackName;\n}\n", "import { join } from 'path';\nimport { promises as fs } from 'fs';\nimport { exists, readTextFile } from '../../utils/fs.js';\nimport { logger } from '../../utils/logger.js';\nimport { DIR_PATTERNS, FILE_PATTERNS } from '../../constants/index.js';\nimport type { MarketplacePluginEntry, MarketplaceManifest } from './marketplace-handler.js';\nimport { parseMarketplace } from './marketplace-handler.js';\n\nexport type PluginType = 'individual' | 'marketplace' | 'marketplace-defined';\n\nexport interface PluginDetectionResult {\n isPlugin: boolean;\n type?: PluginType;\n manifestPath?: string;\n}\n\n/**\n * Detect if a directory contains a Claude Code plugin.\n * \n * Detection order:\n * 1. Check for .claude-plugin/plugin.json (individual plugin)\n * 2. 
Check for .claude-plugin/marketplace.json (marketplace)\n * \n * @param dirPath - Absolute path to directory to check\n * @returns Detection result with plugin type if found\n */\nexport async function detectPluginType(dirPath: string): Promise<PluginDetectionResult> {\n const pluginDir = join(dirPath, DIR_PATTERNS.CLAUDE_PLUGIN);\n \n // Check for individual plugin\n const pluginManifestPath = join(pluginDir, FILE_PATTERNS.PLUGIN_JSON);\n if (await exists(pluginManifestPath)) {\n logger.info('Detected individual Claude Code plugin', { path: pluginManifestPath });\n return {\n isPlugin: true,\n type: 'individual',\n manifestPath: pluginManifestPath\n };\n }\n \n // Check for marketplace\n const marketplaceManifestPath = join(pluginDir, FILE_PATTERNS.MARKETPLACE_JSON);\n if (await exists(marketplaceManifestPath)) {\n logger.info('Detected Claude Code plugin marketplace', { path: marketplaceManifestPath });\n return {\n isPlugin: true,\n type: 'marketplace',\n manifestPath: marketplaceManifestPath\n };\n }\n \n // Not a plugin\n return { isPlugin: false };\n}\n\n/**\n * Detect if a directory contains a Claude Code plugin, with marketplace context.\n * \n * This enhanced detection supports marketplace-defined plugins (strict:false) that\n * may not have their own plugin.json file.\n * \n * Detection order:\n * 1. Check for .claude-plugin/plugin.json (individual plugin)\n * 2. Check for .claude-plugin/marketplace.json (marketplace)\n * 3. 
If marketplaceEntry exists AND strict:false, check for plugin content\n * \n * @param dirPath - Absolute path to directory to check\n * @param marketplaceEntry - Optional marketplace entry for this plugin\n * @returns Detection result with plugin type if found\n */\nexport async function detectPluginWithMarketplace(\n dirPath: string,\n marketplaceEntry?: MarketplacePluginEntry\n): Promise<PluginDetectionResult> {\n // First try standard detection\n const standardDetection = await detectPluginType(dirPath);\n \n if (standardDetection.isPlugin) {\n return standardDetection;\n }\n \n // If no plugin.json and marketplace entry with strict:false, check for plugin content\n if (marketplaceEntry?.strict === false) {\n const hasContent = await hasPluginContent(dirPath);\n \n if (hasContent) {\n logger.info('Detected marketplace-defined plugin (strict:false)', { \n dirPath,\n pluginName: marketplaceEntry.name \n });\n \n return {\n isPlugin: true,\n type: 'marketplace-defined'\n };\n }\n }\n \n return { isPlugin: false };\n}\n\n/**\n * Check if a directory has plugin content (commands, agents, etc.)\n * Used to validate marketplace-defined plugins that don't have plugin.json.\n * Exported for use in file-discovery and path-package-loader.\n */\nexport async function hasPluginContent(dirPath: string): Promise<boolean> {\n const pluginContentDirs = ['commands', 'agents', 'skills', 'hooks'];\n const pluginContentFiles = ['.mcp.json', '.lsp.json'];\n \n // Check for plugin content directories\n for (const subdir of pluginContentDirs) {\n const subdirPath = join(dirPath, subdir);\n if (await exists(subdirPath)) {\n // Check if directory is not empty\n try {\n const items = await fs.readdir(subdirPath);\n if (items.length > 0) {\n return true;\n }\n } catch {\n // Ignore errors reading directory\n }\n }\n }\n \n // Check for plugin content files\n for (const file of pluginContentFiles) {\n const filePath = join(dirPath, file);\n if (await exists(filePath)) {\n return true;\n }\n 
}\n \n return false;\n}\n\n/**\n * Load and parse the marketplace manifest from a package directory.\n * Returns null if no marketplace.json exists or parsing fails.\n *\n * @param basePath - Absolute path to the package/repository root\n * @returns Parsed marketplace manifest or null\n */\nexport async function loadMarketplaceManifest(basePath: string): Promise<MarketplaceManifest | null> {\n const manifestPath = join(basePath, DIR_PATTERNS.CLAUDE_PLUGIN, FILE_PATTERNS.MARKETPLACE_JSON);\n if (!(await exists(manifestPath))) return null;\n try {\n return await parseMarketplace(manifestPath, { repoPath: basePath });\n } catch {\n return null;\n }\n}\n\n/**\n * Validate that a plugin manifest can be parsed.\n * Returns true if the manifest is valid JSON.\n */\nexport async function validatePluginManifest(manifestPath: string): Promise<boolean> {\n try {\n const content = await readTextFile(manifestPath);\n JSON.parse(content);\n return true;\n } catch (error) {\n logger.error('Failed to parse plugin manifest', { manifestPath, error });\n return false;\n }\n}\n", "import { join, basename, relative, resolve } from 'path';\nimport { readTextFile, exists } from '../../utils/fs.js';\nimport { logger } from '../../utils/logger.js';\nimport { ValidationError, UserCancellationError } from '../../utils/errors.js';\nimport { buildGitInstallContext, buildPathInstallContext, buildResourceInstallContexts } from './unified/context-builders.js';\nimport type { InstallationContext } from './unified/context.js';\nimport { runUnifiedInstallPipeline } from './unified/pipeline.js';\nimport { detectPluginType, detectPluginWithMarketplace, validatePluginManifest } from './plugin-detector.js';\nimport type { CommandResult, InstallOptions, ExecutionContext } from '../../types/index.js';\nimport { resolveOutput, resolvePrompt } from '../ports/resolve.js';\nimport { runMultiContextPipeline } from './unified/multi-context-pipeline.js';\nimport { getLoaderForSource } from 
'./sources/loader-factory.js';\nimport { applyBaseDetection } from './preprocessing/base-resolver.js';\nimport { resolveConvenienceResources } from './preprocessing/convenience-preprocessor.js';\nimport { discoverResources } from './resource-discoverer.js';\nimport { promptResourceSelection } from './resource-selection-menu.js';\nimport type { ResourceInstallationSpec } from './convenience-matchers.js';\nimport type { SelectedResource } from './resource-types.js';\nimport {\n normalizePluginSource,\n isRelativePathSource,\n isGitSource,\n type PluginSourceSpec,\n type NormalizedPluginSource\n} from './plugin-sources.js';\nimport { generateGitHubPackageName } from '../../utils/plugin-naming.js';\n\n/**\n * Claude Code marketplace manifest schema.\n * See: https://code.claude.com/docs/en/plugin-marketplaces\n */\nexport interface MarketplaceManifest {\n name: string;\n description?: string;\n homepage?: string;\n plugins: MarketplacePluginEntry[];\n}\n\n/**\n * Marketplace plugin entry.\n * Each entry defines a plugin and where to find it.\n * When strict is false, all plugin metadata can be defined here instead of in plugin.json.\n */\nexport interface MarketplacePluginEntry {\n // Required fields\n name: string;\n source: PluginSourceSpec;\n \n // Standard metadata fields\n description?: string;\n version?: string;\n author?: {\n name?: string;\n email?: string;\n url?: string;\n };\n homepage?: string;\n repository?: string | {\n type?: string;\n url?: string;\n };\n license?: string;\n keywords?: string[];\n category?: string;\n tags?: string[];\n \n // Component configuration fields\n commands?: string | string[];\n agents?: string | string[];\n hooks?: string | object;\n mcpServers?: string | object;\n lspServers?: string | object;\n \n // Strictness control\n strict?: boolean;\n}\n\n/**\n * Parse and validate a marketplace manifest.\n * \n * @param manifestPath - Path to marketplace.json file\n * @param context - Context for fallback naming\n * @returns Parsed 
marketplace manifest\n */\nexport async function parseMarketplace(\n manifestPath: string,\n context?: { gitUrl?: string; repoPath?: string }\n): Promise<MarketplaceManifest> {\n logger.debug('Parsing marketplace manifest', { manifestPath, context });\n \n try {\n const content = await readTextFile(manifestPath);\n const manifest = JSON.parse(content) as MarketplaceManifest;\n \n // If name is missing, use fallback from repo name\n if (!manifest.name && context?.repoPath) {\n manifest.name = basename(context.repoPath);\n logger.debug('Marketplace name missing, using repo name as fallback', { name: manifest.name });\n }\n \n // Validate required fields\n if (!manifest.name) {\n throw new ValidationError('Marketplace manifest missing required field: name');\n }\n \n if (!manifest.plugins || !Array.isArray(manifest.plugins)) {\n throw new ValidationError('Marketplace manifest missing or invalid plugins array');\n }\n \n if (manifest.plugins.length === 0) {\n throw new ValidationError('Marketplace contains no plugins');\n }\n \n // Validate each plugin entry\n for (const plugin of manifest.plugins) {\n if (!plugin.name) {\n throw new ValidationError('Marketplace plugin entry missing required field: name');\n }\n if (!plugin.source) {\n throw new ValidationError(`Plugin '${plugin.name}' missing required field: source`);\n }\n \n // Validate source can be normalized (will throw if invalid)\n try {\n normalizePluginSource(plugin.source, plugin.name);\n } catch (error) {\n throw new ValidationError(\n `Plugin '${plugin.name}' has invalid source: ${error instanceof Error ? 
error.message : String(error)}`\n );\n }\n }\n \n logger.info('Parsed marketplace manifest', {\n name: manifest.name,\n pluginCount: manifest.plugins.length\n });\n \n return manifest;\n \n } catch (error) {\n if (error instanceof ValidationError) {\n throw error;\n }\n throw new ValidationError(`Failed to parse marketplace manifest at ${manifestPath}: ${error}`);\n }\n}\n\n/**\n * Display interactive plugin selection prompt using single selection.\n * \n * @param marketplace - Parsed marketplace manifest\n * @returns Selected plugin name (empty string if user cancelled)\n */\nexport async function promptPluginSelection(\n marketplace: MarketplaceManifest,\n execContext?: ExecutionContext\n): Promise<string> {\n const out = resolveOutput(execContext);\n const prompt = resolvePrompt(execContext);\n\n // Display marketplace description (header is shown by the spinner stop in orchestrator)\n if (marketplace.description) {\n out.message(marketplace.description);\n }\n\n const choices = marketplace.plugins\n .sort((a, b) => a.name.localeCompare(b.name))\n .map(plugin => ({\n title: plugin.name,\n value: plugin.name,\n description: plugin.description || ''\n }));\n\n try {\n const selectedPlugin = await prompt.select(\n 'Select a plugin to install:',\n choices\n );\n \n logger.info('User selected plugin', { selected: selectedPlugin });\n return selectedPlugin as string;\n } catch (error) {\n if (error instanceof UserCancellationError) {\n logger.info('User cancelled plugin selection');\n return '';\n }\n throw error;\n }\n}\n\n/**\n * Install mode type\n */\nexport type InstallMode = 'full' | 'partial';\n\n/**\n * Prompt user to choose between full plugin install or partial (individual resources).\n * \n * @param pluginName - Name of the plugin being installed\n * @returns Install mode ('full' or 'partial'), or empty string if cancelled\n */\nexport async function promptInstallMode(pluginName: string, execContext?: ExecutionContext): Promise<InstallMode | ''> {\n const 
prompt = resolvePrompt(execContext);\n try {\n const mode = await prompt.select(\n `How would you like to install ${pluginName}?`,\n [\n { \n title: 'Install full plugin', \n value: 'full' as const,\n description: 'Install all resources from this plugin' \n },\n { \n title: 'Select individual resources', \n value: 'partial' as const,\n description: 'Choose specific agents, skills, commands, etc.' \n }\n ]\n );\n \n logger.info('User selected install mode', { mode });\n return mode as InstallMode;\n } catch (error) {\n if (error instanceof UserCancellationError) {\n logger.info('User cancelled install mode selection');\n return '';\n }\n throw error;\n }\n}\n\n/**\n * Install individual resources from a plugin (partial install mode).\n * Discovers resources and prompts user to select which ones to install.\n * \n * @param pluginDir - Absolute path to plugin directory\n * @param pluginEntry - Marketplace plugin entry\n * @param context - Installation context with source metadata\n * @param repoRoot - Repository root path\n * @returns Command result\n */\nasync function installPluginPartial(\n pluginDir: string,\n pluginEntry: MarketplacePluginEntry,\n context: any,\n repoRoot: string\n): Promise<CommandResult> {\n const out = resolveOutput(context.execution);\n logger.info('Starting partial plugin installation', {\n plugin: pluginEntry.name,\n path: pluginDir\n });\n \n // Discover all resources with spinner\n const s = out.spinner();\n s.start('Discovering resources');\n \n const discovery = await discoverResources(pluginDir, repoRoot);\n \n // Stop spinner with completion message\n if (discovery.total === 0) {\n s.stop('No resources found');\n } else {\n s.stop(`${discovery.total} resource${discovery.total === 1 ? 
'' : 's'} discovered`);\n }\n \n // Check if any resources found\n if (discovery.total === 0) {\n out.warn('No installable resources found in this plugin');\n return {\n success: true,\n data: { installed: 0, skipped: 0 }\n };\n }\n \n // Prompt for resource selection\n const selected: SelectedResource[] = await promptResourceSelection(\n discovery,\n context.source.packageName || pluginEntry.name,\n context.source.version,\n resolveOutput(context.execution),\n resolvePrompt(context.execution)\n );\n \n if (selected.length === 0) {\n return {\n success: true,\n data: { installed: 0, skipped: 0 }\n };\n }\n \n // Convert selected resources to ResourceInstallationSpec format\n const resourceSpecs: ResourceInstallationSpec[] = selected.map(s => ({\n name: s.displayName,\n resourceType: s.resourceType,\n resourcePath: s.resourcePath,\n basePath: resolve(pluginDir),\n resourceKind: s.installKind,\n matchedBy: 'filename' as const,\n resourceVersion: s.version\n }));\n \n // Build resource contexts for installation\n const resourceContexts = buildResourceInstallContexts(\n context,\n resourceSpecs,\n repoRoot\n ).map(rc => {\n // Ensure path-based loader can resolve repo-relative resourcePath\n if (rc.source.type === 'path') {\n rc.source.localPath = repoRoot;\n }\n return rc;\n });\n \n // Run multi-context pipeline with grouped report for multi-resource selection\n const result = await runMultiContextPipeline(resourceContexts, {\n groupReport: true,\n groupReportPackageName: context.source.packageName || pluginEntry.name\n });\n \n return {\n success: result.success,\n error: result.error,\n data: {\n installed: result.data?.installed || 0,\n skipped: result.data?.skipped || 0\n }\n };\n}\n\n/**\n * Install selected plugins from a marketplace.\n * \n * @param marketplaceDir - Absolute path to cloned marketplace repository root\n * @param marketplace - Parsed marketplace manifest\n * @param selectedName - Name of plugin to install\n * @param installMode - Install mode 
('full' or 'partial')\n * @param marketplaceGitUrl - Git URL of the marketplace repository\n * @param marketplaceGitRef - Git ref (branch/tag/sha) if specified\n * @param marketplaceCommitSha - Commit SHA of cached marketplace\n * @param options - Install options\n * @param cwd - Current working directory for installation\n */\nexport async function installMarketplacePlugins(\n marketplaceDir: string,\n marketplace: MarketplaceManifest,\n selectedName: string,\n installMode: InstallMode,\n marketplaceGitUrl: string,\n marketplaceGitRef: string | undefined,\n marketplaceCommitSha: string,\n options: InstallOptions,\n execContext: ExecutionContext,\n convenienceOptions?: { agents?: string[]; skills?: string[]; rules?: string[]; commands?: string[] }\n): Promise<CommandResult> {\n logger.info('Installing marketplace plugin', { \n marketplace: marketplace.name,\n plugin: selectedName,\n mode: installMode\n });\n \n const out = resolveOutput(execContext);\n \n const pluginEntry = marketplace.plugins.find(p => p.name === selectedName);\n if (!pluginEntry) {\n const error = `Plugin '${selectedName}' not found in marketplace`;\n logger.error(error, { marketplace: marketplace.name });\n out.error(`${selectedName}: ${error}`);\n return { success: false, error };\n }\n \n // Normalize the plugin source\n let normalizedSource: NormalizedPluginSource;\n try {\n normalizedSource = normalizePluginSource(pluginEntry.source, selectedName);\n } catch (error) {\n logger.error('Failed to normalize plugin source', { plugin: selectedName, error });\n const errorMsg = error instanceof Error ? 
error.message : 'Invalid source configuration';\n out.error(`${selectedName}: ${errorMsg}`);\n return { success: false, error: errorMsg };\n }\n \n // Install based on source type\n try {\n let installResult: CommandResult;\n \n if (isRelativePathSource(normalizedSource)) {\n installResult = await installRelativePathPlugin(\n marketplaceDir,\n marketplace,\n pluginEntry,\n normalizedSource,\n installMode,\n marketplaceGitUrl,\n marketplaceGitRef,\n marketplaceCommitSha,\n options,\n execContext,\n convenienceOptions\n );\n } else if (isGitSource(normalizedSource)) {\n installResult = await installGitPlugin(\n marketplace,\n pluginEntry,\n normalizedSource,\n installMode,\n options,\n execContext,\n convenienceOptions\n );\n } else {\n throw new Error(`Unsupported source type: ${normalizedSource.type}`);\n }\n \n return installResult;\n \n } catch (error) {\n logger.error('Failed to install plugin', { plugin: selectedName, error });\n const errorMsg = error instanceof Error ? error.message : String(error);\n out.error(`${selectedName}: ${errorMsg}`);\n return { success: false, error: errorMsg };\n }\n}\n\n/**\n * Install a plugin from a relative path within the marketplace repository.\n */\nasync function installRelativePathPlugin(\n marketplaceDir: string,\n marketplace: MarketplaceManifest,\n pluginEntry: MarketplacePluginEntry,\n normalizedSource: NormalizedPluginSource,\n installMode: InstallMode,\n marketplaceGitUrl: string,\n marketplaceGitRef: string | undefined,\n marketplaceCommitSha: string,\n options: InstallOptions,\n execContext: ExecutionContext,\n convenienceOptions?: { agents?: string[]; skills?: string[]; rules?: string[]; commands?: string[] }\n): Promise<CommandResult> {\n const out = resolveOutput(execContext);\n const pluginSubdir = normalizedSource.relativePath!;\n const pluginDir = join(marketplaceDir, pluginSubdir);\n \n // Validate plugin subdirectory exists (silent for partial mode)\n if (!(await exists(pluginDir))) {\n const error = `Path 
'${pluginSubdir}' does not exist in marketplace repository`;\n logger.error('Plugin path not found', { \n plugin: pluginEntry.name, \n path: pluginSubdir,\n fullPath: pluginDir\n });\n out.error(`${pluginEntry.name}: ${error}`);\n return { success: false, error };\n }\n \n const hasConvenienceOptions = Boolean(convenienceOptions?.agents?.length || convenienceOptions?.skills?.length || convenienceOptions?.rules?.length || convenienceOptions?.commands?.length);\n\n if (hasConvenienceOptions) {\n logger.info('Convenience filters active, bypassing full plugin validation', {\n plugin: pluginEntry.name,\n path: pluginSubdir\n });\n\n const ctx = await buildPathInstallContext(\n execContext,\n pluginDir,\n {\n ...options,\n sourceType: 'directory' as const\n }\n );\n\n ctx.source.gitSourceOverride = {\n gitUrl: marketplaceGitUrl,\n gitRef: marketplaceGitRef,\n gitPath: pluginSubdir\n };\n\n ctx.source.pluginMetadata = {\n isPlugin: true,\n marketplaceEntry: pluginEntry,\n marketplaceSource: {\n url: marketplaceGitUrl,\n commitSha: marketplaceCommitSha,\n pluginName: pluginEntry.name\n }\n };\n\n ctx.detectedBase = pluginDir;\n ctx.baseRelative = relative(marketplaceDir, pluginDir) || '.';\n\n // Set packageName for resource scoping (gh@owner/repo/path format)\n ctx.source.packageName = generateGitHubPackageName({\n gitUrl: marketplaceGitUrl,\n path: pluginSubdir\n });\n\n const resources = await resolveConvenienceResources(pluginDir, marketplaceDir, convenienceOptions ?? 
{});\n\n const resourceContexts = buildResourceInstallContexts(ctx, resources, marketplaceDir).map(rc => {\n if (rc.source.type === 'path') {\n rc.source.localPath = marketplaceDir;\n }\n return rc;\n });\n\n // Subsumption filtering is handled centrally by runMultiContextPipeline\n\n const multiResult = await runMultiContextPipeline(resourceContexts, {\n groupReport: true,\n groupReportPackageName: pluginEntry.name\n });\n return {\n success: multiResult.success,\n error: multiResult.error\n };\n }\n\n // Build path context for the already-cloned plugin directory\n const ctx = await buildPathInstallContext(\n execContext,\n pluginDir,\n {\n ...options,\n sourceType: 'directory' as const\n }\n );\n \n // Add git source override for manifest recording\n ctx.source.gitSourceOverride = {\n gitUrl: marketplaceGitUrl,\n gitRef: marketplaceGitRef,\n gitPath: pluginSubdir\n };\n \n // Add marketplace metadata to context\n ctx.source.pluginMetadata = {\n isPlugin: true,\n marketplaceEntry: pluginEntry,\n marketplaceSource: {\n url: marketplaceGitUrl,\n commitSha: marketplaceCommitSha,\n pluginName: pluginEntry.name\n }\n };\n\n // Branch based on install mode\n if (installMode === 'partial') {\n // Partial install: prompt for resource selection\n logger.info('Using partial install mode for relative path plugin', {\n plugin: pluginEntry.name,\n path: pluginSubdir\n });\n \n ctx.detectedBase = pluginDir;\n ctx.baseRelative = relative(marketplaceDir, pluginDir) || '.';\n \n const repoRoot = marketplaceDir;\n return await installPluginPartial(pluginDir, pluginEntry, ctx, repoRoot);\n }\n\n // Full install: validate and install entire plugin\n logger.info('Using full install mode for relative path plugin', {\n plugin: pluginEntry.name,\n path: pluginSubdir\n });\n\n // Validate plugin structure with marketplace context\n const detection = await detectPluginWithMarketplace(pluginDir, pluginEntry);\n \n if (!detection.isPlugin) {\n const strictInfo = pluginEntry.strict === false 
\n ? ' Set \"strict\": false in marketplace entry if this plugin is defined entirely in marketplace.json.'\n : '';\n const error = `Path '${pluginSubdir}' does not contain a valid plugin.${strictInfo}`;\n logger.error('Invalid plugin structure', { \n plugin: pluginEntry.name, \n path: pluginSubdir,\n strict: pluginEntry.strict\n });\n out.error(`${pluginEntry.name}: ${error}`);\n return { success: false, error };\n }\n \n // For plugins with plugin.json, validate it's parseable\n if (detection.manifestPath) {\n if (!(await validatePluginManifest(detection.manifestPath))) {\n const error = `Invalid plugin manifest in '${pluginSubdir}' (cannot parse JSON)`;\n logger.error('Invalid plugin manifest', { plugin: pluginEntry.name });\n out.error(`${pluginEntry.name}: ${error}`);\n return { success: false, error };\n }\n }\n \n // Update context with detection results\n ctx.source.pluginMetadata = {\n ...ctx.source.pluginMetadata,\n pluginType: detection.type as any,\n manifestPath: detection.manifestPath\n };\n\n // Subsumption is handled centrally by the pipeline's subsumption phase\n\n // Run the unified pipeline \u2014 it handles its own spinner and reports results\n const pipelineResult = await runUnifiedInstallPipeline(ctx);\n \n if (!pipelineResult.success) {\n const installedName = ctx.source.packageName || pluginEntry.name;\n out.error(`${installedName}: ${pipelineResult.error || 'Unknown error'}`);\n }\n \n return {\n success: pipelineResult.success,\n error: pipelineResult.error\n };\n}\n\n/**\n * Install a plugin from an external git repository.\n */\nasync function installGitPlugin(\n marketplace: MarketplaceManifest,\n pluginEntry: MarketplacePluginEntry,\n normalizedSource: NormalizedPluginSource,\n installMode: InstallMode,\n options: InstallOptions,\n execContext: ExecutionContext,\n convenienceOptions?: { agents?: string[]; skills?: string[]; rules?: string[]; commands?: string[] }\n): Promise<CommandResult> {\n const out = resolveOutput(execContext);\n 
const gitUrl = normalizedSource.gitUrl!;\n const gitRef = normalizedSource.gitRef;\n const gitPath = normalizedSource.gitPath;\n \n logger.info('Installing git plugin', {\n plugin: pluginEntry.name,\n gitUrl,\n gitRef,\n gitPath\n });\n \n // Build git context\n const ctx = await buildGitInstallContext(\n execContext,\n gitUrl,\n {\n ...options,\n gitRef,\n gitPath\n }\n );\n \n // Add marketplace metadata for proper scoping\n ctx.source.pluginMetadata = {\n isPlugin: true,\n marketplaceEntry: pluginEntry\n };\n\n const hasConvenienceOptions = Boolean(convenienceOptions?.agents?.length || convenienceOptions?.skills?.length || convenienceOptions?.rules?.length || convenienceOptions?.commands?.length);\n if (hasConvenienceOptions) {\n const loader = getLoaderForSource(ctx.source);\n const loaded = await loader.load(ctx.source, options, execContext);\n\n ctx.source.packageName = loaded.packageName;\n ctx.source.version = loaded.version;\n ctx.source.contentRoot = loaded.contentRoot;\n ctx.source.pluginMetadata = {\n ...loaded.pluginMetadata,\n ...(ctx.source.pluginMetadata ?? {}),\n marketplaceEntry: pluginEntry\n };\n\n if (loaded.sourceMetadata?.commitSha) {\n (ctx.source as any)._commitSha = loaded.sourceMetadata.commitSha;\n }\n\n if (loaded.sourceMetadata?.baseDetection) {\n applyBaseDetection(ctx, loaded);\n }\n\n const basePath = ctx.detectedBase || loaded.contentRoot || execContext.targetDir;\n const repoRoot = loaded.sourceMetadata?.repoPath || loaded.contentRoot || basePath;\n const resources = await resolveConvenienceResources(basePath, repoRoot, convenienceOptions ?? 
{});\n\n const resourceContexts = buildResourceInstallContexts(ctx, resources, repoRoot);\n const multiResult = await runMultiContextPipeline(resourceContexts, {\n groupReport: true,\n groupReportPackageName: ctx.source.packageName || pluginEntry.name\n });\n return {\n success: multiResult.success,\n error: multiResult.error\n };\n }\n\n // Branch based on install mode\n if (installMode === 'partial') {\n // Partial install: load plugin, discover resources, prompt for selection\n logger.info('Using partial install mode for git plugin', {\n plugin: pluginEntry.name\n });\n\n const loader = getLoaderForSource(ctx.source);\n const loaded = await loader.load(ctx.source, options, execContext);\n\n ctx.source.packageName = loaded.packageName;\n ctx.source.version = loaded.version;\n ctx.source.contentRoot = loaded.contentRoot;\n ctx.source.pluginMetadata = {\n ...loaded.pluginMetadata,\n ...(ctx.source.pluginMetadata ?? {}),\n marketplaceEntry: pluginEntry\n };\n\n if (loaded.sourceMetadata?.commitSha) {\n (ctx.source as any)._commitSha = loaded.sourceMetadata.commitSha;\n }\n\n if (loaded.sourceMetadata?.baseDetection) {\n applyBaseDetection(ctx, loaded);\n }\n\n const basePath = ctx.detectedBase || loaded.contentRoot || execContext.targetDir;\n const repoRoot = loaded.sourceMetadata?.repoPath || loaded.contentRoot || basePath;\n \n return await installPluginPartial(basePath, pluginEntry, ctx, repoRoot);\n }\n \n // Full install: run unified pipeline \u2014 it handles its own spinner and reports results\n logger.info('Using full install mode for git plugin', {\n plugin: pluginEntry.name\n });\n\n // Subsumption is handled centrally by the pipeline's subsumption phase\n\n const pipelineResult = await runUnifiedInstallPipeline(ctx);\n \n if (!pipelineResult.success) {\n const installedName = ctx.source.packageName || pluginEntry.name;\n out.error(`${installedName}: ${pipelineResult.error || 'Unknown error'}`);\n }\n \n return {\n success: pipelineResult.success,\n error: 
pipelineResult.error\n };\n}\n\n/**\n * Validate that requested plugin names exist in marketplace.\n *\n * @param marketplace - Parsed marketplace manifest\n * @param requestedPlugins - Array of plugin names to validate\n * @returns Object with valid and invalid plugin name arrays\n */\nexport function validatePluginNames(\n marketplace: MarketplaceManifest,\n requestedPlugins: string[]\n): { valid: string[]; invalid: string[] } {\n const availableNames = new Set(marketplace.plugins.map(p => p.name));\n\n const valid: string[] = [];\n const invalid: string[] = [];\n\n for (const name of requestedPlugins) {\n if (availableNames.has(name)) {\n valid.push(name);\n } else {\n invalid.push(name);\n }\n }\n\n return { valid, invalid };\n}\n\n/**\n * Resolved plugin info for resource discovery (used by --interactive).\n */\nexport interface ResolvedPluginInfo {\n pluginEntry: MarketplacePluginEntry;\n context: InstallationContext;\n basePath: string;\n repoRoot: string;\n}\n\n/**\n * Resolve plugin content roots for resource discovery.\n * Loads each plugin's source and returns resolved paths and contexts\n * without installing anything.\n *\n * @param marketplaceDir - Absolute path to cloned marketplace repository root\n * @param marketplace - Parsed marketplace manifest\n * @param pluginNames - Plugin names to resolve\n * @param marketplaceGitUrl - Git URL of the marketplace repository\n * @param marketplaceGitRef - Git ref (branch/tag/sha) if specified\n * @param marketplaceCommitSha - Commit SHA of cached marketplace\n * @param options - Install options\n * @param execContext - Execution context\n * @returns Array of resolved plugin info objects\n */\nexport async function resolvePluginContentRoots(\n marketplaceDir: string,\n marketplace: MarketplaceManifest,\n pluginNames: string[],\n marketplaceGitUrl: string,\n marketplaceGitRef: string | undefined,\n marketplaceCommitSha: string,\n options: InstallOptions,\n execContext: ExecutionContext\n): 
Promise<ResolvedPluginInfo[]> {\n const results: ResolvedPluginInfo[] = [];\n\n for (const name of pluginNames) {\n const pluginEntry = marketplace.plugins.find(p => p.name === name);\n if (!pluginEntry) continue;\n\n let normalizedSource: NormalizedPluginSource;\n try {\n normalizedSource = normalizePluginSource(pluginEntry.source, name);\n } catch (error) {\n logger.warn('Failed to normalize plugin source for list', { plugin: name, error });\n continue;\n }\n\n try {\n if (isRelativePathSource(normalizedSource)) {\n const pluginSubdir = normalizedSource.relativePath!;\n const pluginDir = join(marketplaceDir, pluginSubdir);\n\n if (!(await exists(pluginDir))) {\n logger.warn('Plugin path not found for list', { plugin: name, path: pluginSubdir });\n continue;\n }\n\n const ctx = await buildPathInstallContext(execContext, pluginDir, {\n ...options,\n sourceType: 'directory' as const\n });\n\n ctx.source.gitSourceOverride = {\n gitUrl: marketplaceGitUrl,\n gitRef: marketplaceGitRef,\n gitPath: pluginSubdir\n };\n\n ctx.source.pluginMetadata = {\n isPlugin: true,\n marketplaceEntry: pluginEntry,\n marketplaceSource: {\n url: marketplaceGitUrl,\n commitSha: marketplaceCommitSha,\n pluginName: pluginEntry.name\n }\n };\n\n ctx.detectedBase = pluginDir;\n ctx.baseRelative = relative(marketplaceDir, pluginDir) || '.';\n\n results.push({\n pluginEntry,\n context: ctx,\n basePath: pluginDir,\n repoRoot: marketplaceDir\n });\n } else if (isGitSource(normalizedSource)) {\n const ctx = await buildGitInstallContext(execContext, normalizedSource.gitUrl!, {\n ...options,\n gitRef: normalizedSource.gitRef,\n gitPath: normalizedSource.gitPath\n });\n\n ctx.source.pluginMetadata = {\n isPlugin: true,\n marketplaceEntry: pluginEntry\n };\n\n const loader = getLoaderForSource(ctx.source);\n const loaded = await loader.load(ctx.source, options, execContext);\n\n ctx.source.packageName = loaded.packageName;\n ctx.source.version = loaded.version;\n ctx.source.contentRoot = 
loaded.contentRoot;\n ctx.source.pluginMetadata = {\n ...loaded.pluginMetadata,\n ...(ctx.source.pluginMetadata ?? {}),\n marketplaceEntry: pluginEntry\n };\n\n if (loaded.sourceMetadata?.baseDetection) {\n applyBaseDetection(ctx, loaded);\n }\n\n const basePath = ctx.detectedBase || loaded.contentRoot || execContext.targetDir;\n const repoRoot = loaded.sourceMetadata?.repoPath || loaded.contentRoot || basePath;\n\n results.push({\n pluginEntry,\n context: ctx,\n basePath,\n repoRoot\n });\n }\n } catch (error) {\n logger.warn('Failed to resolve plugin for list', { plugin: name, error });\n continue;\n }\n }\n\n return results;\n}\n", "import { basename, join, relative } from 'path';\nimport type { InstallOptions, ExecutionContext } from '../../../types/index.js';\nimport type { InstallationContext, PackageSource } from './context.js';\nimport { classifyPackageInput } from '../package-input.js';\nimport { normalizePlatforms } from '../../platform/platform-mapper.js';\nimport { parsePackageYml } from '../../../utils/package-yml.js';\nimport { getLocalPackageYmlPath, getLocalOpenPackageDir } from '../../../utils/paths.js';\nimport { exists } from '../../../utils/fs.js';\nimport { createWorkspacePackageYml, ensureLocalOpenPackageStructure } from '../../package-management.js';\nimport { logger } from '../../../utils/logger.js';\nimport type { ResourceInstallationSpec } from '../convenience-matchers.js';\n\n/**\n * Result of building contexts for bulk install.\n * Workspace root context is built here; dependency resolution is handled\n * separately by the wave engine in runRecursiveBulkInstall.\n */\nexport interface BulkInstallContextsResult {\n workspaceContext: InstallationContext | null;\n hasDependencies: boolean;\n}\n\n/**\n * Build context for registry-based installation\n */\nexport async function buildRegistryInstallContext(\n execContext: ExecutionContext,\n packageName: string,\n options: InstallOptions & { version?: string; registryPath?: string }\n): 
Promise<InstallationContext> {\n const source: PackageSource = {\n type: 'registry',\n packageName,\n version: options.version,\n registryPath: options.registryPath\n };\n \n return {\n execution: execContext,\n targetDir: execContext.targetDir,\n source,\n mode: 'install',\n options,\n platforms: normalizePlatforms(options.platforms) || [],\n installScope: 'full',\n resolvedPackages: [],\n warnings: [],\n errors: []\n };\n}\n\n/**\n * Build context for path-based installation\n */\nexport async function buildPathInstallContext(\n execContext: ExecutionContext,\n sourcePath: string,\n options: InstallOptions & { sourceType: 'directory' | 'tarball' }\n): Promise<InstallationContext> {\n // Will need to load package to get name\n // For now, we'll populate after loading\n const source: PackageSource = {\n type: 'path',\n packageName: '', // Populated after loading\n localPath: sourcePath,\n sourceType: options.sourceType\n };\n \n return {\n execution: execContext,\n targetDir: execContext.targetDir,\n source,\n mode: 'install',\n options,\n platforms: normalizePlatforms(options.platforms) || [],\n installScope: 'full',\n resolvedPackages: [],\n warnings: [],\n errors: []\n };\n}\n\n/**\n * Build context for git-based installation\n */\nexport async function buildGitInstallContext(\n execContext: ExecutionContext,\n gitUrl: string,\n options: InstallOptions & { gitRef?: string; gitPath?: string }\n): Promise<InstallationContext> {\n const source: PackageSource = {\n type: 'git',\n packageName: '', // Populated after loading\n gitUrl,\n gitRef: options.gitRef,\n gitPath: options.gitPath\n };\n \n return {\n execution: execContext,\n targetDir: execContext.targetDir,\n source,\n mode: 'install',\n options,\n platforms: normalizePlatforms(options.platforms) || [],\n installScope: 'full',\n resolvedPackages: [],\n warnings: [],\n errors: []\n };\n}\n\n/**\n * Build context for workspace root installation\n * Used when installing/applying workspace-level files from 
.openpackage/\n */\nexport async function buildWorkspaceRootInstallContext(\n execContext: ExecutionContext,\n options: InstallOptions,\n mode: 'install' | 'apply' = 'install'\n): Promise<InstallationContext | null> {\n const cwd = execContext.targetDir;\n \n // Ensure .openpackage/ structure exists\n await ensureLocalOpenPackageStructure(cwd);\n \n // Create workspace manifest if it doesn't exist\n await createWorkspacePackageYml(cwd);\n \n const openpackageDir = getLocalOpenPackageDir(cwd);\n const packageYmlPath = getLocalPackageYmlPath(cwd);\n \n // Check if workspace manifest exists\n if (!(await exists(packageYmlPath))) {\n return null;\n }\n \n // Load workspace manifest\n let config;\n try {\n config = await parsePackageYml(packageYmlPath);\n } catch (error) {\n logger.warn(`Failed to read workspace manifest: ${error}`);\n return null;\n }\n \n // Use workspace directory name as package name if not specified in manifest\n const packageName = config.name || basename(cwd);\n \n const source: PackageSource = {\n type: 'workspace',\n packageName,\n version: config.version,\n contentRoot: openpackageDir\n };\n \n return {\n execution: execContext,\n targetDir: execContext.targetDir,\n source,\n mode,\n options: mode === 'apply' ? 
{ ...options, force: true } : options,\n platforms: normalizePlatforms(options.platforms) || [],\n installScope: 'full',\n resolvedPackages: [],\n warnings: [],\n errors: []\n };\n}\n\n\n\n/**\n * Build context from package input (auto-detect type)\n */\nexport async function buildInstallContext(\n execContext: ExecutionContext,\n packageInput: string | undefined,\n options: InstallOptions\n): Promise<InstallationContext | InstallationContext[] | BulkInstallContextsResult> {\n // No input = bulk install (returns workspace + dependency contexts separately)\n if (!packageInput) {\n return buildBulkInstallContexts(execContext, options);\n }\n \n // Classify input to determine source type (use sourceCwd for input resolution)\n const classification = await classifyPackageInput(packageInput, execContext.sourceCwd);\n \n switch (classification.type) {\n case 'registry':\n return buildRegistryInstallContext(execContext, classification.name!, options);\n \n case 'directory':\n case 'tarball':\n return buildPathInstallContext(execContext, classification.resolvedPath!, {\n ...options,\n sourceType: classification.type\n });\n \n case 'git':\n return buildGitInstallContext(execContext, classification.gitUrl!, {\n ...options,\n gitRef: classification.gitRef,\n gitPath: classification.gitPath\n });\n \n default:\n throw new Error(`Unknown package input type: ${classification.type}`);\n }\n}\n\n/**\n * Build contexts for bulk installation.\n * Returns workspace root context and a flag indicating whether the manifest declares dependencies.\n * Actual dependency resolution and installation is handled by the wave engine in runRecursiveBulkInstall.\n */\nasync function buildBulkInstallContexts(\n execContext: ExecutionContext,\n options: InstallOptions\n): Promise<BulkInstallContextsResult> {\n const cwd = execContext.targetDir;\n\n // Build workspace root context (run as distinct stage before dependencies)\n const workspaceContext = await 
buildWorkspaceRootInstallContext(execContext, options, 'install');\n\n // Ensure workspace manifest exists before reading\n await createWorkspacePackageYml(cwd);\n\n // Check whether the manifest declares any dependencies (for empty-manifest messaging)\n const opkgYmlPath = getLocalPackageYmlPath(cwd);\n const opkgYml = await parsePackageYml(opkgYmlPath);\n\n const deps = ((opkgYml as any).packages ?? (opkgYml as any).dependencies ?? []) as any[];\n const devDeps = (((opkgYml as any).devDependencies ?? (opkgYml as any)['dev-dependencies'] ?? []) as any[]);\n const hasDependencies = [...deps, ...devDeps].filter(Boolean).length > 0;\n\n return { workspaceContext: workspaceContext ?? null, hasDependencies };\n}\n\n/**\n * Build context from a ResourceSpec (Phase 3: Resource Model)\n */\nexport async function buildResourceInstallContext(\n execContext: ExecutionContext,\n resourceSpec: any, // ResourceSpec from resource-arg-parser\n options: InstallOptions\n): Promise<InstallationContext> {\n let source: PackageSource;\n \n switch (resourceSpec.type) {\n case 'github-url':\n case 'github-shorthand':\n // Git source with resource path\n source = {\n type: 'git',\n packageName: '', // Populated after loading\n gitUrl: resourceSpec.gitUrl!,\n gitRef: resourceSpec.ref,\n // IMPORTANT: In resource-mode, `resourceSpec.path` represents a resource filter\n // (file or directory) within the repo, NOT a git subdirectory to clone into.\n // `gitPath` is reserved for \"package lives in subdirectory\" semantics (legacy/manifest).\n resourcePath: resourceSpec.path // Store resource path for base detection + scoping\n };\n break;\n \n case 'registry':\n // Registry source with optional path\n source = {\n type: 'registry',\n packageName: resourceSpec.name!,\n version: resourceSpec.version,\n resourcePath: resourceSpec.path\n };\n break;\n \n case 'filepath':\n // Local path source\n const absolutePath = resourceSpec.absolutePath!;\n const relativePath = 
relative(execContext.sourceCwd, absolutePath).replace(/\\\\/g, '/');\n const resourcePath = relativePath.startsWith('..') ? basename(absolutePath) : relativePath;\n source = {\n type: 'path',\n packageName: '', // Populated after loading\n localPath: absolutePath,\n sourceType: resourceSpec.isDirectory ? 'directory' : 'tarball',\n resourcePath\n };\n break;\n \n default:\n throw new Error(`Unknown resource type: ${resourceSpec.type}`);\n }\n \n return {\n execution: execContext,\n targetDir: execContext.targetDir,\n source,\n mode: 'install',\n options,\n platforms: normalizePlatforms(options.platforms) || [],\n installScope: 'full', // May be narrowed to 'subset' during path scoping\n resolvedPackages: [],\n warnings: [],\n errors: []\n };\n}\n\nfunction buildResourceMatchedPattern(\n resourceSpec: ResourceInstallationSpec,\n repoRoot: string,\n basePath: string\n): string | undefined {\n const absoluteResourcePath = join(repoRoot, resourceSpec.resourcePath);\n const relativeToBase = relative(basePath, absoluteResourcePath)\n .replace(/\\\\/g, '/')\n .replace(/^\\.\\/?/, '');\n\n if (!relativeToBase) {\n return undefined;\n }\n\n if (resourceSpec.resourceKind === 'directory') {\n const normalized = relativeToBase.replace(/\\/$/, '');\n return `${normalized}/**`;\n }\n\n return relativeToBase;\n}\n\n/**\n * Prepare resource contexts for multi-resource pipeline: set localPath\n * for path sources so the pipeline uses the correct root.\n */\nexport function prepareResourceContextsForMultiInstall(\n contexts: InstallationContext[],\n repoRoot: string\n): InstallationContext[] {\n return contexts.map(rc => {\n if (rc.source.type === 'path') {\n rc.source.localPath = repoRoot;\n }\n return rc;\n });\n}\n\n/**\n * Build multiple contexts for resource-centric installations.\n * For single-file installs from plugins, scopes the package name to the resource path\n * so the workspace index key is e.g. 
gh@user/repo/plugin/agents/foo.md rather than the plugin root.\n * \n * All contexts produced here are 'subset' scope -- they install a filtered set of files\n * from the package, not the entire package.\n */\nexport function buildResourceInstallContexts(\n baseContext: InstallationContext,\n resourceSpecs: ResourceInstallationSpec[],\n repoRoot: string\n): InstallationContext[] {\n const detectedBase = baseContext.detectedBase ?? baseContext.source.contentRoot ?? baseContext.targetDir;\n const baseRelative = baseContext.baseRelative ?? (relative(repoRoot, detectedBase) || '.');\n\n return resourceSpecs.map(spec => {\n const effectiveBase = baseContext.detectedBase ?? spec.basePath;\n const matchedPattern = buildResourceMatchedPattern(spec, repoRoot, effectiveBase) ?? baseContext.matchedPattern;\n\n // For single-file installs, scope the package name so index key and manifest are e.g.\n // gh@user/repo/plugins/feature-dev/agents/code-architect.md (not plugin root)\n const isSingleFile = Boolean(\n matchedPattern &&\n !matchedPattern.includes('*') &&\n !matchedPattern.includes('?') &&\n !matchedPattern.includes('[')\n );\n const baseName = baseContext.source.packageName;\n const scopedName = isSingleFile ? `${baseName}/${matchedPattern}` : baseName;\n\n const source: PackageSource = {\n ...baseContext.source,\n packageName: scopedName,\n resourcePath: spec.resourcePath,\n resourceVersion: spec.resourceVersion\n };\n\n let resolvedPackages = baseContext.resolvedPackages;\n if (isSingleFile && baseContext.resolvedPackages.length > 0) {\n resolvedPackages = baseContext.resolvedPackages.map(pkg => ({\n ...pkg,\n name: pkg.isRoot ? scopedName : pkg.name\n }));\n } else if (baseContext.resolvedPackages.length === 0) {\n resolvedPackages = [];\n }\n\n return {\n ...baseContext,\n source,\n resolvedPackages,\n warnings: [],\n errors: [],\n detectedBase: effectiveBase,\n baseRelative: baseRelative === '' ? '.' 
: baseRelative,\n baseSource: baseContext.baseSource,\n matchedPattern,\n installScope: 'subset'\n };\n });\n}\n\n\n", "import { resolve, isAbsolute } from 'path';\nimport { exists } from '../../utils/fs.js';\nimport { isValidPackageDirectory } from '../package-context.js';\nimport { parsePackageInstallSpec } from '../../utils/package-name.js';\nimport { ValidationError } from '../../utils/errors.js';\nimport { detectGitSource } from '../../utils/git-url-detection.js';\nimport { logger } from '../../utils/logger.js';\nimport {\n resolvePackageByName,\n type PackageSourceCandidate,\n type SourceResolutionInfo\n} from '../package-name-resolution.js';\nimport { detectPluginType } from './plugin-detector.js';\nimport { DIR_PATTERNS, FILE_PATTERNS, CLAUDE_PLUGIN_PATHS } from '../../constants/index.js';\n\nexport type PackageInputType = 'registry' | 'directory' | 'tarball' | 'git';\n\nexport interface PackageInputClassification {\n type: PackageInputType;\n \n // For 'registry' type\n name?: string;\n version?: string;\n registryPath?: string;\n\n // For 'git' type\n gitUrl?: string;\n gitRef?: string;\n gitPath?: string;\n \n // For 'directory' or 'tarball' types\n resolvedPath?: string; // Absolute path\n \n // For version-aware resolution metadata\n sourceComparisonInfo?: SourceResolutionInfo;\n}\n\n// Re-export types from package-name-resolution for backward compatibility\nexport type { PackageSourceCandidate, SourceResolutionInfo as SourceComparisonInfo };\n\n/**\n * Classify whether input is a registry package name, local directory, or tarball.\n * \n * Detection order:\n * 1. Ends with .tgz or .tar.gz AND file exists -> 'tarball'\n * 2. Starts with /, ./, ../, or is . AND isValidPackageDirectory -> 'directory'\n * 3. 
Otherwise -> parse as registry name via parsePackageInstallSpec\n * \n * @param raw - The raw input string from the user\n * @param cwd - Current working directory for resolving relative paths\n * @returns Classification of the input type and relevant information\n */\nexport async function classifyPackageInput(\n raw: string,\n cwd: string = process.cwd()\n): Promise<PackageInputClassification> {\n // Check for git sources first (new detection system)\n const gitSpec = detectGitSource(raw);\n if (gitSpec) {\n return {\n type: 'git',\n gitUrl: gitSpec.url,\n gitRef: gitSpec.ref,\n gitPath: gitSpec.path\n };\n }\n\n // Check for tarball file extension\n const isTarballPath = raw.endsWith(FILE_PATTERNS.TGZ_FILES) || raw.endsWith(FILE_PATTERNS.TAR_GZ_FILES);\n \n // Check if input looks like a path\n const looksLikePath = raw.startsWith('/') ||\n raw.startsWith('./') ||\n raw.startsWith('../') ||\n raw === '.' ||\n raw.startsWith('~/') ||\n raw.startsWith(DIR_PATTERNS.OPENPACKAGE + '/') || // Include .openpackage paths\n (isAbsolute(raw) && !raw.includes('@'));\n \n if (isTarballPath || looksLikePath) {\n const resolvedPath = isAbsolute(raw) ? raw : resolve(cwd, raw);\n \n if (isTarballPath) {\n if (await exists(resolvedPath)) {\n return { type: 'tarball', resolvedPath };\n }\n // File doesn't exist - fall through to treat as registry name\n // (will error later with \"file not found\" or \"package not found\")\n }\n \n // Check if it's a valid package directory OR a plugin\n const isValid = await isValidPackageDirectory(resolvedPath);\n const pluginDetection = await detectPluginType(resolvedPath);\n \n if (isValid || pluginDetection.isPlugin) {\n return { type: 'directory', resolvedPath };\n }\n \n // Path exists but isn't a valid package? Error\n if (await exists(resolvedPath)) {\n throw new ValidationError(\n `Path '${raw}' exists but is not a valid OpenPackage directory or Claude Code plugin. 
` +\n `Valid packages must contain ${FILE_PATTERNS.OPENPACKAGE_YML} or ${CLAUDE_PLUGIN_PATHS.PLUGIN_MANIFEST}`\n );\n }\n }\n \n // Check if this looks like a simple package name (not an explicit path)\n // Search in workspace/global/registry packages using shared resolution\n if (!looksLikePath && !isTarballPath) {\n const resolution = await resolvePackageByName({\n cwd,\n packageName: raw,\n checkCwd: false, // Install doesn't prioritize CWD\n searchWorkspace: true, // Search workspace packages\n searchGlobal: true, // Search global packages \n searchRegistry: true // Search registry (install needs this)\n });\n\n if (resolution.found && resolution.path) {\n logger.info('Resolved package name to path for install', { \n packageName: raw, \n path: resolution.path,\n sourceType: resolution.sourceType\n });\n \n return { \n type: 'directory', \n resolvedPath: resolution.path,\n sourceComparisonInfo: resolution.resolutionInfo\n };\n }\n }\n \n // Treat as registry package name\n try {\n const { name, version, registryPath } = parsePackageInstallSpec(raw);\n return { type: 'registry', name, version, registryPath };\n } catch (error) {\n // If parsing fails, still return registry type - let downstream handle the error\n return { type: 'registry', name: raw };\n }\n}\n\n\n", "/**\n * Modern git source detection and parsing.\n * Supports:\n * - GitHub shorthand (gh@owner/repo[/path])\n * - GitHub web URLs (https://github.com/owner/repo/tree/ref/path)\n * - Generic git URLs with hash fragments (#ref&path=x)\n * - Legacy prefixes (github:, git:) with deprecation warnings\n */\n\nimport { ValidationError } from './errors.js';\nimport { logger } from './logger.js';\n\n/**\n * Parsed git source specification.\n */\nexport interface GitSpec {\n url: string; // Normalized git URL\n ref?: string; // Branch/tag/commit\n path?: string; // Subdirectory within repo\n}\n\n/**\n * Detect and parse git sources from user input.\n * Returns null if input is not a git source.\n * \n * 
Detection priority (by user intent):\n * 1. GitHub shorthand (gh@) - new explicit syntax\n * 2. URL protocols (https://, http://, git://, git@) - direct URLs\n * 3. Git file extension (.git) - any URL ending with .git\n * 4. Legacy prefixes (github:, git:) - deprecated, with warnings\n * \n * Implementation order (to avoid conflicts):\n * - Legacy prefixes checked first (they mask underlying URL patterns)\n * - GitHub shorthand (explicit new syntax)\n * - GitHub URLs (specific pattern matching)\n * - Generic git URLs (catch-all)\n * \n * @param input - Raw user input\n * @returns Parsed GitSpec or null if not a git source\n */\nexport function detectGitSource(input: string): GitSpec | null {\n if (!input || typeof input !== 'string') {\n return null;\n }\n\n // Check legacy prefixes first (they hide the URL underneath)\n const legacy = parseLegacyPrefix(input);\n if (legacy) {\n return legacy;\n }\n\n // GitHub shorthand (gh@owner/repo)\n const ghShorthand = parseGitHubShorthand(input);\n if (ghShorthand) {\n return ghShorthand;\n }\n\n // GitHub URLs (extract ref/path from URL structure)\n const ghUrl = parseGitHubUrl(input);\n if (ghUrl) {\n return ghUrl;\n }\n\n // Generic git URLs (with hash fragments)\n const genericGit = parseGenericGitUrl(input);\n if (genericGit) {\n return genericGit;\n }\n\n return null;\n}\n\n/**\n * Parse GitHub shorthand format: gh@owner/repo[/path]\n * \n * Examples:\n * - gh@anthropics/claude-code\n * - gh@user/repo/plugins/x\n * \n * @param input - Raw input string\n * @returns GitSpec or null if not GitHub shorthand\n */\nexport function parseGitHubShorthand(input: string): GitSpec | null {\n if (!input.startsWith('gh@')) {\n return null;\n }\n\n const remainder = input.slice(3); // Remove 'gh@'\n \n if (!remainder) {\n throw new ValidationError(\n `Invalid GitHub shorthand 'gh@'. 
Expected format: gh@owner/repo[/path]\\n\\n` +\n `Examples:\\n` +\n ` gh@anthropics/claude-code\\n` +\n ` gh@user/repo/plugins/my-plugin`\n );\n }\n\n const segments = remainder.split('/').filter(s => s.length > 0);\n \n if (segments.length < 2) {\n throw new ValidationError(\n `Invalid GitHub shorthand '${input}'. Expected format: gh@owner/repo[/path]\\n\\n` +\n `Examples:\\n` +\n ` gh@anthropics/claude-code\\n` +\n ` gh@user/repo/plugins/my-plugin`\n );\n }\n\n const owner = segments[0];\n const repo = segments[1];\n\n const url = normalizeGitHubUrl(owner, repo);\n const path = segments.length > 2 ? segments.slice(2).join('/') : undefined;\n\n logger.debug('Parsed GitHub shorthand', { input, owner, repo, path, url });\n\n return {\n url,\n ref: undefined, // GitHub shorthand always uses default branch\n path\n };\n}\n\n/**\n * Parse GitHub web URLs.\n * \n * Supported formats:\n * - https://github.com/owner/repo\n * - https://github.com/owner/repo.git\n * - https://github.com/owner/repo/tree/ref\n * - https://github.com/owner/repo/tree/ref/path\n * \n * @param input - Raw input string\n * @returns GitSpec or null if not a GitHub URL\n */\nexport function parseGitHubUrl(input: string): GitSpec | null {\n let url: URL;\n \n try {\n url = new URL(input);\n } catch {\n return null;\n }\n\n // Must be github.com\n if (url.hostname !== 'github.com') {\n return null;\n }\n\n const segments = url.pathname.split('/').filter(s => s.length > 0);\n \n if (segments.length < 2) {\n throw new ValidationError(\n `Invalid GitHub URL. 
Expected: https://github.com/owner/repo\\n\\n` +\n `Got: ${input}`\n );\n }\n\n const owner = segments[0];\n let repo = segments[1];\n \n // Strip .git suffix from repo if present\n if (repo.endsWith('.git')) {\n repo = repo.slice(0, -4);\n }\n\n const normalizedUrl = normalizeGitHubUrl(owner, repo);\n let ref: string | undefined;\n let path: string | undefined;\n\n // Check for /tree/ or /blob/ paths\n if (segments.length > 2) {\n const pathType = segments[2];\n \n if (pathType === 'blob') {\n throw new ValidationError(\n `Cannot install from single file URL\\n\\n` +\n `You provided:\\n` +\n ` ${input}\\n\\n` +\n `To install a package, use:\\n` +\n ` \u2022 Repository: https://github.com/${owner}/${repo}\\n` +\n ` \u2022 With branch: https://github.com/${owner}/${repo}/tree/main\\n` +\n ` \u2022 Subdirectory: https://github.com/${owner}/${repo}/tree/main/plugins/x\\n` +\n ` \u2022 Shorthand: gh@${owner}/${repo}/plugins/x`\n );\n }\n \n if (pathType === 'tree') {\n if (segments.length < 4) {\n throw new ValidationError(\n `Invalid GitHub URL. 
Ref is required after /tree/\\n\\n` +\n `Got: ${input}\\n\\n` +\n `Expected: https://github.com/${owner}/${repo}/tree/<ref>[/path]`\n );\n }\n \n ref = decodeURIComponent(segments[3]);\n \n // Path is everything after the ref\n if (segments.length > 4) {\n path = segments.slice(4).map(s => decodeURIComponent(s)).join('/');\n }\n }\n }\n\n logger.debug('Parsed GitHub URL', { input, owner, repo, ref, path, url: normalizedUrl });\n\n return {\n url: normalizedUrl,\n ref,\n path\n };\n}\n\n/**\n * Parse generic git URLs with hash fragments.\n * \n * Supported formats:\n * - https://host/path.git\n * - git://host/path\n * - git@host:path.git\n * - <any-git-url>#<ref>\n * - <any-git-url>#<ref>&path=<path>\n * - <any-git-url>#path=<path>\n * \n * @param input - Raw input string\n * @returns GitSpec or null if not a git URL\n */\nexport function parseGenericGitUrl(input: string): GitSpec | null {\n if (!isGitUrl(input)) {\n return null;\n }\n\n // Split by # to separate base URL and hash fragment\n const [baseUrl, hashPart] = input.split('#', 2);\n \n const result: GitSpec = {\n url: baseUrl\n };\n\n // Parse hash fragment if present\n if (hashPart) {\n const { ref, path } = parseHashFragment(hashPart, input);\n if (ref) result.ref = ref;\n if (path) result.path = path;\n }\n\n logger.debug('Parsed generic git URL', { input, ...result });\n\n return result;\n}\n\n/**\n * Parse legacy prefix formats with deprecation warnings.\n * \n * Supported formats:\n * - github:owner/repo[#ref][&subdirectory=path]\n * - git:<url>[#ref][&subdirectory=path]\n * \n * @param input - Raw input string\n * @returns GitSpec or null if not using legacy prefix\n */\nfunction parseLegacyPrefix(input: string): GitSpec | null {\n // Check for github: prefix\n if (input.startsWith('github:')) {\n logger.warn(`\u26A0\uFE0F The 'github:' prefix is deprecated. 
Use 'gh@user/repo' instead.`);\n \n const remainder = input.slice(7); // Remove 'github:'\n const [repoPart, hashPart] = remainder.split('#', 2);\n const [owner, repo] = repoPart.split('/');\n \n if (!owner || !repo) {\n throw new ValidationError(\n `Invalid github spec '${input}'. Expected github:owner/repo[#ref][&subdirectory=path]`\n );\n }\n \n const url = normalizeGitHubUrl(owner, repo);\n const result: GitSpec = { url };\n \n if (hashPart) {\n const { ref, path } = parseHashFragment(hashPart, input);\n if (ref) result.ref = ref;\n if (path) result.path = path;\n }\n \n return result;\n }\n \n // Check for git: prefix\n if (input.startsWith('git:')) {\n logger.warn(`\u26A0\uFE0F The 'git:' prefix is deprecated. Use the URL directly.`);\n \n const remainder = input.slice(4); // Remove 'git:'\n const [url, hashPart] = remainder.split('#', 2);\n \n if (!url) {\n throw new ValidationError(\n `Invalid git spec '${input}'. Expected git:<url>[#ref][&subdirectory=path]`\n );\n }\n \n const result: GitSpec = { url };\n \n if (hashPart) {\n const { ref, path } = parseHashFragment(hashPart, input);\n if (ref) result.ref = ref;\n if (path) result.path = path;\n }\n \n return result;\n }\n \n return null;\n}\n\n/**\n * Parse hash fragment for ref and path parameters.\n * \n * Supported formats:\n * - #<ref>\n * - #path=<path>\n * - #subdirectory=<path> (backward compat, no warning)\n * - #<ref>&path=<path>\n * - #<ref>&subdirectory=<path>\n * \n * @param hashPart - Hash fragment (without #)\n * @param fullInput - Full input for error messages\n * @returns Object with ref and path\n */\nfunction parseHashFragment(\n hashPart: string,\n fullInput: string\n): { ref?: string; path?: string } {\n const result: { ref?: string; path?: string } = {};\n \n // Split by & to get parts\n const parts = hashPart.split('&');\n \n for (const part of parts) {\n if (!part) continue;\n \n if (part.includes('=')) {\n // It's a key=value parameter\n const eqIndex = part.indexOf('=');\n const 
key = part.slice(0, eqIndex);\n const value = part.slice(eqIndex + 1);\n \n if (key === 'path' || key === 'subdirectory') {\n result.path = value;\n } else {\n throw new ValidationError(\n `Invalid hash fragment '#${hashPart}'\\n\\n` +\n `Unknown parameter: ${key}\\n\\n` +\n `Supported parameters:\\n` +\n ` \u2022 ref (unnamed): #main\\n` +\n ` \u2022 path: #path=plugins/x\\n` +\n ` \u2022 combined: #main&path=plugins/x`\n );\n }\n } else {\n // It's the ref (branch/tag/sha)\n if (result.ref) {\n throw new ValidationError(\n `Multiple refs specified in hash fragment\\n\\n` +\n `Got: #${hashPart}\\n\\n` +\n `Use only one ref: #main or #v1.0.0`\n );\n }\n result.ref = part;\n }\n }\n \n return result;\n}\n\n/**\n * Check if input looks like a git URL.\n *\n * Detection criteria:\n * - Starts with a git protocol: https://, http://, git://, ssh://, git@\n * - Ends with .git extension (hash fragment is stripped first, so repo.git#ref matches)\n */\nexport function isGitUrl(input: string): boolean {\n // Strip hash fragment for .git extension check (e.g., repo.git#ref)\n const baseUrl = input.split('#', 1)[0];\n return (\n input.startsWith('https://') ||\n input.startsWith('http://') ||\n input.startsWith('git://') ||\n input.startsWith('ssh://') ||\n input.startsWith('git@') ||\n baseUrl.endsWith('.git')\n );\n}\n\n/**\n * Normalize GitHub owner/repo to full git URL.\n * \n * @param owner - GitHub username or org\n * @param repo - Repository name\n * @returns Normalized GitHub git URL\n */\nexport function normalizeGitHubUrl(owner: string, repo: string): string {\n // Ensure repo doesn't have .git suffix for consistent handling\n const cleanRepo = repo.endsWith('.git') ? 
repo.slice(0, -4) : repo;\n return `https://github.com/${owner}/${cleanRepo}.git`;\n}\n", "import { basename, relative } from 'path';\nimport semver from 'semver';\nimport { PackageYml, PackageDependency } from '../types/index.js';\nimport { parsePackageYml, writePackageYml } from '../utils/package-yml.js';\nimport { exists, ensureDir } from '../utils/fs.js';\nimport { logger } from '../utils/logger.js';\nimport { getLocalOpenPackageDir, getLocalPackageYmlPath, getLocalPackagesDir, getLocalPackageDir, isRootPackage } from '../utils/paths.js';\nimport { DEPENDENCY_ARRAYS, FILE_PATTERNS, PACKAGE_PATHS } from '../constants/index.js';\nimport { createCaretRange, hasExplicitPrereleaseIntent, isPrereleaseVersion } from '../utils/version-ranges.js';\nimport { extractBaseVersion } from '../utils/version-generator.js';\nimport { isUnversionedVersion } from './package-versioning.js';\nimport { normalizePackageName, arePackageNamesEquivalent, normalizePackageNameForLookup } from '../utils/package-name.js';\nimport { extractGitHubInfo } from '../utils/git-url-parser.js';\nimport { packageManager } from './package.js';\nimport type { OutputPort } from './ports/output.js';\nimport type { PromptPort } from './ports/prompt.js';\nimport { resolveOutput, resolvePrompt } from './ports/resolve.js';\nimport { writePackageFilesToDirectory } from '../utils/package-copy.js';\nimport { getPackageFilesDir, getPackageYmlPath } from './package-context.js';\nimport { isManifestPath, normalizePackagePath } from '../utils/manifest-paths.js';\n\n/**\n * Ensure local OpenPackage directory structure exists\n * Shared utility for both install and save commands\n */\nexport async function ensureLocalOpenPackageStructure(targetDir: string): Promise<void> {\n const openpackageDir = getLocalOpenPackageDir(targetDir);\n const packagesDir = getLocalPackagesDir(targetDir);\n \n await Promise.all([\n ensureDir(openpackageDir),\n ensureDir(packagesDir)\n ]);\n}\n\n/**\n * Create a basic openpackage.yml file 
for workspace if it doesn't exist\n * Shared utility for both install and save commands\n * @param force - If true, overwrite existing openpackage.yml\n * @returns the openpackage.yml if it was created, null if it already existed and force=false\n */\nexport async function createWorkspacePackageYml(targetDir: string, force: boolean = false, output?: OutputPort): Promise<PackageYml | null> {\n const out = output ?? resolveOutput();\n await ensureLocalOpenPackageStructure(targetDir);\n\n const packageYmlPath = getLocalPackageYmlPath(targetDir);\n const projectName = basename(targetDir);\n const basicPackageYml: PackageYml = {\n name: projectName,\n dependencies: [],\n 'dev-dependencies': []\n };\n\n if (await exists(packageYmlPath)) {\n if (!force) {\n return null; // openpackage.yml already exists, no need to create\n }\n await writePackageYml(packageYmlPath, basicPackageYml);\n logger.info(`Overwrote basic openpackage.yml with name: ${projectName}`);\n out.success(`Overwrote basic openpackage.yml in .openpackage/ with name: ${projectName}`);\n return basicPackageYml;\n }\n\n await writePackageYml(packageYmlPath, basicPackageYml);\n logger.info(`Initialized workspace openpackage.yml`);\n out.success(`Initialized workspace openpackage.yml in .openpackage/`);\n return basicPackageYml;\n}\n\nexport interface EnsurePackageWithYmlOptions {\n interactive?: boolean;\n defaultVersion?: string;\n}\n\nexport interface EnsurePackageWithYmlResult {\n normalizedName: string;\n packageDir: string;\n packageYmlPath: string;\n packageConfig: PackageYml;\n isNew: boolean;\n}\n\n/**\n * Ensure a cached package directory and openpackage.yml exist, optionally prompting for details.\n * This is for NESTED packages only. 
Root packages use different flow.\n */\nexport async function ensurePackageWithYml(\n targetDir: string,\n packageName: string,\n options: EnsurePackageWithYmlOptions = {},\n output?: OutputPort,\n prompt?: PromptPort\n): Promise<EnsurePackageWithYmlResult> {\n const out = output ?? resolveOutput();\n const prm = prompt ?? resolvePrompt();\n await ensureLocalOpenPackageStructure(targetDir);\n\n const normalizedName = normalizePackageName(packageName);\n const packageDir = getPackageFilesDir(targetDir, 'nested', normalizedName);\n const packageYmlPath = getPackageYmlPath(targetDir, 'nested', normalizedName);\n\n await ensureDir(packageDir);\n\n let packageConfig: PackageYml | undefined;\n let isNew = false;\n\n if (await exists(packageYmlPath)) {\n packageConfig = await parsePackageYml(packageYmlPath);\n } else {\n isNew = true;\n // Try to seed from existing local registry copy to avoid prompts and preserve metadata.\n try {\n const registryExists = await packageManager.packageExists(normalizedName);\n if (registryExists) {\n const existing = await packageManager.loadPackage(normalizedName);\n packageConfig = {\n ...existing.metadata,\n name: normalizedName,\n partial: true\n };\n logger.info(`Loaded openpackage.yml for '${normalizedName}' from local registry copy`);\n out.success(`Loaded openpackage.yml from local registry for ${normalizedName}`);\n }\n } catch (error) {\n logger.debug('Unable to seed openpackage.yml from registry; falling back to prompts', { normalizedName, error });\n }\n\n if (!packageConfig) {\n if (options.interactive) {\n out.info(`Create new package \"${normalizedName}\"`);\n \n const description = await prm.text('Description:');\n const keywordsInput = await prm.text('Keywords (space-separated):');\n const isPrivate = await prm.confirm('Private package?', false);\n \n const keywordsArray = keywordsInput\n ? 
keywordsInput.trim().split(/\\s+/).filter((k: string) => k.length > 0)\n : [];\n \n packageConfig = {\n name: normalizePackageName(normalizedName),\n ...(description && { description }),\n ...(keywordsArray.length > 0 && { keywords: keywordsArray }),\n ...(isPrivate && { private: isPrivate })\n };\n } else {\n packageConfig = {\n name: normalizedName,\n ...(options.defaultVersion ? { version: options.defaultVersion } : {})\n };\n }\n\n packageConfig = {\n ...packageConfig,\n partial: true\n };\n }\n\n await writePackageYml(packageYmlPath, packageConfig);\n logger.info(\n `Created new package '${packageConfig.name}${packageConfig.version ? `@${packageConfig.version}` : ''}' at ${relative(targetDir, packageDir)}`\n );\n }\n\n if (packageConfig.name !== normalizedName) {\n const updatedConfig = { ...packageConfig, name: normalizedName };\n await writePackageYml(packageYmlPath, updatedConfig);\n packageConfig = updatedConfig;\n }\n\n return {\n normalizedName,\n packageDir,\n packageYmlPath,\n packageConfig,\n isNew\n };\n}\n\n/**\n * Add a package dependency to openpackage.yml with smart placement logic\n * Shared utility for both install and save commands\n */\nexport async function addPackageToYml(\n targetDir: string,\n packageName: string,\n packageVersion: string | undefined,\n isDev: boolean = false,\n originalVersion?: string, // The original version/range that was requested\n silent: boolean = false,\n base?: string, // Path from source root to package root (local path or git subdirectory)\n git?: string, // Git source url (DEPRECATED: use url) (mutually exclusive with base/version)\n ref?: string, // Git ref (DEPRECATED: embed in url as #ref)\n resourcePath?: string, // Resource selection path within the package (partial installs)\n output?: OutputPort\n): Promise<void> {\n const out = output ?? 
resolveOutput();\n const packageYmlPath = getLocalPackageYmlPath(targetDir);\n \n if (!(await exists(packageYmlPath))) {\n return; // If no openpackage.yml exists, ignore this step\n }\n \n // Don't add the workspace package to its own manifest\n if (await isRootPackage(targetDir, packageName)) {\n logger.debug(`Skipping manifest update: package '${packageName}' is the workspace package itself`);\n return;\n }\n \n const config = await parsePackageYml(packageYmlPath);\n if (!config.dependencies) config.dependencies = [];\n if (!config[DEPENDENCY_ARRAYS.DEV_DEPENDENCIES]) config[DEPENDENCY_ARRAYS.DEV_DEPENDENCIES] = [];\n\n const normalizedPackageName = normalizePackageName(packageName);\n const nameWithVersion = packageVersion ? `${packageName}@${packageVersion}` : packageName;\n const dependenciesArray = config.dependencies;\n const devDependenciesArray = config[DEPENDENCY_ARRAYS.DEV_DEPENDENCIES]!;\n\n const findIndex = (arr: PackageDependency[]): number =>\n arr.findIndex(dep => arePackageNamesEquivalent(dep.name, normalizedPackageName));\n\n let currentLocation: 'dependencies' | 'dev-dependencies' | null = null;\n let existingIndex = findIndex(dependenciesArray);\n if (existingIndex >= 0) {\n currentLocation = DEPENDENCY_ARRAYS.DEPENDENCIES;\n } else {\n existingIndex = findIndex(devDependenciesArray);\n if (existingIndex >= 0) {\n currentLocation = DEPENDENCY_ARRAYS.DEV_DEPENDENCIES;\n } else {\n existingIndex = -1;\n }\n }\n\n const existingRange =\n currentLocation && existingIndex >= 0\n ? config[currentLocation]![existingIndex]?.version\n : undefined;\n\n const shouldOmitVersion = isUnversionedVersion(packageVersion) || isUnversionedVersion(originalVersion);\n let versionToWrite: string | undefined = git ? undefined : shouldOmitVersion ? 
undefined : originalVersion;\n\n if (!git && !shouldOmitVersion && packageVersion) {\n const baseVersion = extractBaseVersion(packageVersion);\n const defaultRange = createCaretRange(baseVersion);\n versionToWrite = originalVersion ?? defaultRange;\n\n if (!originalVersion && existingRange) {\n const hasPrereleaseIntent = hasExplicitPrereleaseIntent(existingRange);\n const isNewVersionStable = !isPrereleaseVersion(packageVersion);\n\n if (hasPrereleaseIntent) {\n if (isNewVersionStable) {\n // Constraint has explicit prerelease intent and we're packing a stable\n // version on the same base line: normalize to a stable caret.\n versionToWrite = createCaretRange(baseVersion);\n logger.debug(\n `Updating range from prerelease-including '${existingRange}' to stable '${versionToWrite}' ` +\n `for ${packageName} (pack transition to ${packageVersion})`\n );\n } else {\n // For prerelease-intent ranges during saves (prerelease versions),\n // always preserve the existing constraint.\n versionToWrite = existingRange;\n }\n } else if (rangeIncludesVersion(existingRange, baseVersion)) {\n // Stable (non-prerelease) constraint that already includes the new base\n // version: keep it unchanged.\n versionToWrite = existingRange;\n } else {\n // Stable constraint that does not include the new base version: bump to\n // a new caret on the packed stable.\n versionToWrite = defaultRange;\n }\n }\n }\n\n // Build url field with embedded ref for git sources\n let urlField: string | undefined;\n if (git) {\n urlField = ref ? `${git}#${ref}` : git;\n }\n \n const dependency: PackageDependency = {\n name: normalizedPackageName,\n ...(versionToWrite ? { version: versionToWrite } : {}),\n ...(base ? { base } : {}), // source navigation (local path or git subdirectory)\n ...(urlField ? { url: urlField } : {}), // git source URL\n ...(resourcePath ? 
{ path: resourcePath } : {}), // resource selection (partial installs)\n };\n \n // Determine target location (dependencies vs dev-dependencies)\n \n let targetArray: 'dependencies' | 'dev-dependencies';\n if (currentLocation === DEPENDENCY_ARRAYS.DEV_DEPENDENCIES && !isDev) {\n targetArray = DEPENDENCY_ARRAYS.DEV_DEPENDENCIES;\n logger.info(`Keeping package in dev-dependencies: ${nameWithVersion}`);\n } else if (currentLocation === DEPENDENCY_ARRAYS.DEPENDENCIES && isDev) {\n targetArray = DEPENDENCY_ARRAYS.DEV_DEPENDENCIES;\n logger.info(`Moving package from dependencies to dev-dependencies: ${nameWithVersion}`);\n } else {\n targetArray = isDev ? DEPENDENCY_ARRAYS.DEV_DEPENDENCIES : DEPENDENCY_ARRAYS.DEPENDENCIES;\n }\n \n // Remove from current location if moving between arrays\n if (currentLocation && currentLocation !== targetArray && existingIndex >= 0) {\n config[currentLocation]!.splice(existingIndex, 1);\n existingIndex = -1;\n currentLocation = null;\n }\n \n // Update or add dependency\n const targetArrayRef = config[targetArray]!;\n const existingTargetIndex =\n currentLocation === targetArray ? 
findIndex(targetArrayRef) : -1;\n \n if (existingTargetIndex >= 0) {\n const existingDepForTarget = targetArrayRef[existingTargetIndex];\n const versionChanged = existingDepForTarget.version !== dependency.version;\n if (versionChanged) {\n targetArrayRef[existingTargetIndex] = dependency;\n if (!silent) {\n logger.info(`Updated existing package dependency: ${nameWithVersion}`);\n out.success(`Updated ${nameWithVersion} in main openpackage.yml`);\n }\n }\n } else {\n targetArrayRef.push(dependency);\n if (!silent) {\n logger.info(`Added new package dependency: ${nameWithVersion}`);\n out.success(`Added ${nameWithVersion} to main openpackage.yml`);\n }\n }\n \n await writePackageYml(packageYmlPath, config);\n}\n\n\n/**\n * Check if a dependency matches a given package name, handling various naming formats.\n * This function supports matching across format migrations:\n * - Direct name comparison (after normalization)\n * - Git-based matching (username/repo/path combinations)\n * \n * Examples of matches:\n * - \"anthropics/claude-plugins-official/code-review\" matches \"ghanthropics/claude-plugins-official/plugins/code-review\"\n * - \"@anthropics/claude-plugins-official\" matches \"ghanthropics/claude-plugins-official\"\n * - \"username/repo\" matches \"gh@username/repo\"\n */\nfunction doesDependencyMatchPackageName(\n dep: PackageDependency,\n userInputName: string\n): boolean {\n // Normalize both for direct comparison\n const normalizedDepName = normalizePackageNameForLookup(dep.name);\n const normalizedUserName = normalizePackageNameForLookup(userInputName);\n \n // Direct match after normalization\n if (normalizedDepName === normalizedUserName) {\n return true;\n }\n \n // If dependency has a git source, try matching based on git URL + path\n const gitUrlRaw = dep.url || dep.git;\n if (gitUrlRaw) {\n const githubInfo = extractGitHubInfo(gitUrlRaw);\n if (!githubInfo) {\n return false;\n }\n \n const { username, repo } = githubInfo;\n\n // For git sources, 
path is resource selection, not repo subdirectory\n const actualPath = dep.base || (dep.subdirectory?.startsWith('./')\n ? dep.subdirectory.substring(2)\n : dep.subdirectory);\n \n // Build all possible name variations that could match\n const possibleNames = [\n `${username}/${repo}`,\n `@${username}/${repo}`,\n `gh@${username}/${repo}`,\n ];\n \n if (actualPath) {\n possibleNames.push(\n `${username}/${repo}/${actualPath}`,\n `@${username}/${repo}/${actualPath}`,\n `gh@${username}/${repo}/${actualPath}`,\n );\n \n // Also try with just the basename of the path\n // e.g., \"plugins/code-review\" -> \"code-review\"\n const pathBasename = actualPath.split('/').pop();\n if (pathBasename && pathBasename !== actualPath) {\n possibleNames.push(\n `${username}/${repo}/${pathBasename}`,\n `@${username}/${repo}/${pathBasename}`,\n `gh@${username}/${repo}/${pathBasename}`,\n );\n }\n }\n \n // Check if any possible name matches the user input (case-insensitive)\n const normalizedInput = normalizePackageName(userInputName);\n for (const possibleName of possibleNames) {\n if (normalizePackageName(possibleName) === normalizedInput) {\n return true;\n }\n }\n }\n \n return false;\n}\n\n/**\n * Remove a dependency entry from a manifest file (both dependencies and dev-dependencies).\n * Use this when removing from a specific package manifest (e.g. --from essentials).\n *\n * @param manifestPath - Absolute path to openpackage.yml\n * @param dependencyName - User-specified name to match (e.g. 
essential-agent, @scope/pkg)\n * @returns true if a dependency was removed, false otherwise\n */\nexport interface RemoveDependencyFromManifestResult {\n removed: boolean;\n section?: 'dependencies' | 'dev-dependencies';\n}\n\nexport async function removeDependencyFromManifest(\n manifestPath: string,\n dependencyName: string\n): Promise<RemoveDependencyFromManifestResult> {\n if (!(await exists(manifestPath))) return { removed: false };\n\n try {\n const config = await parsePackageYml(manifestPath);\n const sections: Array<'dependencies' | 'dev-dependencies'> = [DEPENDENCY_ARRAYS.DEPENDENCIES, DEPENDENCY_ARRAYS.DEV_DEPENDENCIES];\n let removed = false;\n let removedFromSection: 'dependencies' | 'dev-dependencies' | undefined;\n let hadAnyDependencies = false;\n\n for (const section of sections) {\n const arr = config[section];\n if (!arr) continue;\n if (arr.length > 0) hadAnyDependencies = true;\n\n // Filter out dependencies that match the package name\n // Uses context-aware matching to handle git sources and naming migrations\n const next = arr.filter(dep => !doesDependencyMatchPackageName(dep, dependencyName));\n\n if (next.length !== arr.length) {\n config[section] = next as any;\n removed = true;\n removedFromSection = section;\n }\n }\n\n // Always write the config if:\n // 1. A package was removed (to persist the removal), OR\n // 2. 
The file had dependencies (to trigger migration even if no removal happened)\n if (removed || hadAnyDependencies) {\n await writePackageYml(manifestPath, config);\n }\n return { removed, section: removedFromSection };\n } catch (error) {\n logger.warn(`Failed to update openpackage.yml when removing ${dependencyName}: ${error}`);\n return { removed: false };\n }\n}\n\n/**\n * Remove a dependency entry from openpackage.yml (both dependencies and dev-dependencies).\n * Operates on the workspace manifest at .openpackage/openpackage.yml\n */\nexport async function removePackageFromOpenpackageYml(\n targetDir: string,\n packageName: string\n): Promise<boolean> {\n const packageYmlPath = getLocalPackageYmlPath(targetDir);\n const result = await removeDependencyFromManifest(packageYmlPath, packageName);\n return result.removed;\n}\n\n/**\n * Check if a manifest contains a dependency matching the user input.\n * Uses context-aware matching (direct name, git variations, etc.).\n *\n * @param manifestPath - Absolute path to openpackage.yml\n * @param userInput - User-specified name (e.g. 
essential-agent, .opencode)\n * @returns The matched dependency's stored name if found, null otherwise\n */\nexport async function findMatchingDependencyInManifest(\n manifestPath: string,\n userInput: string\n): Promise<string | null> {\n if (!(await exists(manifestPath))) return null;\n\n try {\n const config = await parsePackageYml(manifestPath);\n const sections: Array<'dependencies' | 'dev-dependencies'> = [DEPENDENCY_ARRAYS.DEPENDENCIES, DEPENDENCY_ARRAYS.DEV_DEPENDENCIES];\n\n for (const section of sections) {\n const arr = config[section];\n if (!arr) continue;\n\n const match = arr.find(dep => doesDependencyMatchPackageName(dep, userInput));\n if (match) return match.name;\n }\n return null;\n } catch {\n return null;\n }\n}\n\nfunction rangeIncludesVersion(range: string, version: string): boolean {\n if (!range || !version) {\n return false;\n }\n try {\n return semver.satisfies(version, range, { includePrerelease: true });\n } catch {\n return false;\n }\n}\n\n\n", "import * as semver from 'semver';\n\n/**\n * Version range types supported by the system\n */\nexport type VersionRangeType = 'exact' | 'caret' | 'tilde' | 'wildcard' | 'comparison';\n\n/**\n * Parsed version range information\n */\nexport interface VersionRange {\n type: VersionRangeType;\n baseVersion: string;\n range: string;\n original: string;\n}\n\n/**\n * Parse a version string into a VersionRange object\n */\nexport function parseVersionRange(version: string): VersionRange {\n if (!version || version.trim() === '') {\n throw new Error('Version cannot be empty');\n }\n\n const trimmed = version.trim();\n \n // Handle wildcard/latest\n if (trimmed === '*' || trimmed === 'latest') {\n return {\n type: 'wildcard',\n baseVersion: '0.0.0',\n range: '*',\n original: trimmed\n };\n }\n\n // Handle caret ranges (^1.2.3)\n if (trimmed.startsWith('^')) {\n const baseVersion = trimmed.substring(1);\n if (!semver.valid(baseVersion)) {\n throw new Error(`Invalid base version for caret range: 
${baseVersion}`);\n }\n return {\n type: 'caret',\n baseVersion,\n range: trimmed,\n original: trimmed\n };\n }\n\n // Handle tilde ranges (~1.2.3)\n if (trimmed.startsWith('~')) {\n const baseVersion = trimmed.substring(1);\n if (!semver.valid(baseVersion)) {\n throw new Error(`Invalid base version for tilde range: ${baseVersion}`);\n }\n return {\n type: 'tilde',\n baseVersion,\n range: trimmed,\n original: trimmed\n };\n }\n\n // Handle comparison ranges (>=1.2.3, <2.0.0, etc.)\n if (trimmed.match(/^[><=!]+/)) {\n if (!semver.validRange(trimmed)) {\n throw new Error(`Invalid comparison range: ${trimmed}`);\n }\n // Extract base version from comparison range for display purposes\n const baseVersion = semver.minVersion(trimmed)?.version || '0.0.0';\n return {\n type: 'comparison',\n baseVersion,\n range: trimmed,\n original: trimmed\n };\n }\n\n // Handle exact versions (1.2.3)\n if (semver.valid(trimmed)) {\n return {\n type: 'exact',\n baseVersion: trimmed,\n range: trimmed,\n original: trimmed\n };\n }\n\n throw new Error(`Invalid version format: ${trimmed}`);\n}\n\n/**\n * Check if a version satisfies a version range\n */\nexport function satisfiesVersion(version: string, range: string): boolean {\n try {\n // Always include prerelease versions in satisfaction checks\n return semver.satisfies(version, range, { includePrerelease: true });\n } catch (error) {\n return false;\n }\n}\n\n/**\n * Find the best version that satisfies a range from available versions\n */\nexport function findBestVersion(availableVersions: string[], range: string): string | null {\n try {\n // Sort versions in descending order (latest first)\n const sortedVersions = availableVersions\n .filter(v => semver.valid(v))\n .sort((a, b) => semver.compare(b, a));\n \n // Find the highest version that satisfies the range (including prereleases)\n return semver.maxSatisfying(sortedVersions, range, { includePrerelease: true });\n } catch (error) {\n return null;\n }\n}\n\n/**\n * Get the latest 
version from available versions\n */\nexport function getLatestVersion(availableVersions: string[]): string | null {\n const validVersions = availableVersions.filter(v => semver.valid(v));\n if (validVersions.length === 0) return null;\n \n return validVersions.sort((a, b) => semver.compare(b, a))[0];\n}\n\n/**\n * Create a caret range from a version (^1.2.3)\n */\nexport function createCaretRange(version: string): string {\n if (!semver.valid(version)) {\n throw new Error(`Invalid version for caret range: ${version}`);\n }\n return `^${version}`;\n}\n\n/**\n * Create a tilde range from a version (~1.2.3)\n */\nexport function createTildeRange(version: string): string {\n if (!semver.valid(version)) {\n throw new Error(`Invalid version for tilde range: ${version}`);\n }\n return `~${version}`;\n}\n\n/**\n * Check if a version range is exact (no range operators)\n */\nexport function isExactVersion(version: string): boolean {\n try {\n const parsed = parseVersionRange(version);\n return parsed.type === 'exact';\n } catch {\n return false;\n }\n}\n\n/**\n * Check if a version range is a wildcard (latest)\n */\nexport function isWildcardVersion(version: string): boolean {\n try {\n const parsed = parseVersionRange(version);\n return parsed.type === 'wildcard';\n } catch {\n return false;\n }\n}\n\n/**\n * Get a human-readable description of a version range\n */\nexport function describeVersionRange(version: string): string {\n try {\n const parsed = parseVersionRange(version);\n \n switch (parsed.type) {\n case 'exact':\n return `exact version ${parsed.baseVersion}`;\n case 'caret':\n return `compatible with ${parsed.baseVersion} (^${parsed.baseVersion})`;\n case 'tilde':\n return `approximately ${parsed.baseVersion} (~${parsed.baseVersion})`;\n case 'wildcard':\n return 'latest version (*)';\n case 'comparison':\n return `range ${parsed.range}`;\n default:\n return `version ${parsed.original}`;\n }\n } catch {\n return `invalid version ${version}`;\n }\n}\n\n/**\n * 
Resolve a version range to a specific version from available versions\n */\nexport function resolveVersionRange(version: string, availableVersions: string[]): string | null {\n try {\n const parsed = parseVersionRange(version);\n \n switch (parsed.type) {\n case 'exact':\n return availableVersions.includes(parsed.baseVersion) ? parsed.baseVersion : null;\n case 'wildcard':\n return getLatestVersion(availableVersions);\n default:\n // Resolve to best satisfying version including prereleases\n return findBestVersion(availableVersions, parsed.range);\n }\n } catch {\n return null;\n }\n}\n\n/**\n * Determine if a version string is a prerelease (includes WIP versions)\n */\nexport function isPrereleaseVersion(version: string): boolean {\n const parsed = semver.parse(version);\n return Boolean(parsed && parsed.prerelease.length > 0);\n}\n\n/**\n * Returns the stable base (major.minor.patch) portion of a version string.\n */\nexport function getStableBaseVersion(version: string): string | null {\n const parsed = semver.parse(version);\n if (!parsed) {\n return null;\n }\n\n return `${parsed.major}.${parsed.minor}.${parsed.patch}`;\n}\n\nexport interface VersionClassification {\n stable: string[];\n prerelease: string[];\n wip: string[];\n}\n\nexport function classifyVersions(versions: string[]): VersionClassification {\n const deduped = dedupeValidVersions(versions);\n const stable: string[] = [];\n const prerelease: string[] = [];\n const wip: string[] = [];\n\n for (const version of deduped) {\n if (isPrereleaseVersion(version)) {\n prerelease.push(version);\n wip.push(version); // All prerelease versions are treated as WIP\n } else {\n stable.push(version);\n }\n }\n\n return {\n stable: sortVersionsDesc(stable),\n prerelease: sortVersionsDesc(prerelease),\n wip: sortVersionsDesc(wip)\n };\n}\n\nexport interface VersionSelectionOptions {\n explicitPrereleaseIntent?: boolean;\n}\n\nexport interface VersionSelectionResult {\n version: string | null;\n isPrerelease: 
boolean;\n satisfyingStable: string[];\n satisfyingPrerelease: string[];\n availableStable: string[];\n availablePrerelease: string[];\n reason: 'exact' | 'wildcard' | 'range' | 'none';\n}\n\n/**\n * Determine whether a range explicitly references prerelease intent.\n */\nexport function hasExplicitPrereleaseIntent(range: string): boolean {\n const trimmed = range.trim();\n if (!trimmed || trimmed === '*' || trimmed.toLowerCase() === 'latest') {\n return false;\n }\n\n // Fast-path: if the original range string contains no '-' characters at all,\n // it cannot be explicitly expressing prerelease intent. This avoids treating\n // normalized comparators like \">=1.0.0-0\" (introduced by semver with\n // includePrerelease) as user-authored prerelease ranges when the original\n // input was a stable caret like \"^1.0.0\".\n if (!trimmed.includes('-')) {\n return false;\n }\n\n try {\n const parsedRange = new semver.Range(trimmed, { includePrerelease: true });\n for (const comparatorSet of parsedRange.set) {\n for (const comparator of comparatorSet) {\n if (comparator.semver.prerelease.length > 0) {\n return true;\n }\n }\n }\n } catch {\n return false;\n }\n\n return false;\n}\n\n/**\n * Select the most appropriate version according to WIP vs stable policy.\n */\nexport function selectVersionWithWipPolicy(\n availableVersions: string[],\n range: string,\n options?: VersionSelectionOptions\n): VersionSelectionResult {\n const parsedRange = parseVersionRange(range);\n const deduped = dedupeValidVersions(availableVersions);\n const availableStable = sortVersionsDesc(deduped.filter(version => !isPrereleaseVersion(version)));\n const availablePrerelease = sortVersionsDesc(deduped.filter(version => isPrereleaseVersion(version)));\n const satisfyingStable: string[] = [];\n const satisfyingPrerelease: string[] = [];\n\n const result: VersionSelectionResult = {\n version: null,\n isPrerelease: false,\n satisfyingStable,\n satisfyingPrerelease,\n availableStable,\n 
availablePrerelease,\n reason: 'none'\n };\n\n const finish = (): VersionSelectionResult => {\n return result;\n };\n\n if (parsedRange.type === 'exact') {\n result.reason = 'exact';\n const exactMatch = deduped.find(version => semver.eq(version, parsedRange.baseVersion));\n if (exactMatch) {\n if (isPrereleaseVersion(exactMatch)) {\n satisfyingPrerelease.push(exactMatch);\n result.isPrerelease = true;\n } else {\n satisfyingStable.push(exactMatch);\n }\n result.version = exactMatch;\n }\n return finish();\n }\n\n const normalizedRange = parsedRange.type === 'wildcard' ? '*' : parsedRange.range;\n satisfyingStable.push(\n ...filterSatisfying(availableStable, normalizedRange, false)\n );\n satisfyingPrerelease.push(\n ...filterSatisfying(availablePrerelease, normalizedRange, true)\n );\n\n // Latest wins policy: stable and WIP treated uniformly\n const allSatisfying = sortVersionsDesc([\n ...satisfyingStable,\n ...satisfyingPrerelease\n ]);\n\n if (parsedRange.type === 'wildcard') {\n result.reason = 'wildcard';\n } else {\n result.reason = 'range';\n }\n\n if (allSatisfying.length === 0) {\n return finish();\n }\n\n const selected = allSatisfying[0];\n result.version = selected;\n result.isPrerelease = isPrereleaseVersion(selected);\n return finish();\n}\n\nfunction dedupeValidVersions(versions: string[]): string[] {\n const seen = new Set<string>();\n for (const version of versions) {\n if (!version || !semver.valid(version) || seen.has(version)) {\n continue;\n }\n seen.add(version);\n }\n return Array.from(seen);\n}\n\nfunction sortVersionsDesc(versions: string[]): string[] {\n return versions.slice().sort(semver.rcompare);\n}\n\nfunction filterSatisfying(\n versions: string[],\n range: string,\n includePrerelease: boolean\n): string[] {\n try {\n return sortVersionsDesc(\n versions.filter(version => semver.satisfies(version, range, { includePrerelease }))\n );\n } catch {\n return [];\n }\n}\n", "import { createHash } from 'crypto';\nimport * as semver from 
'semver';\n\n/**\n * Version generation utilities for local and WIP versions.\n */\n\nexport const WIP_TIMESTAMP_TOKEN_LENGTH = 6;\nexport const WORKSPACE_HASH_TOKEN_LENGTH = 8;\n// Length of the short workspace tag used in WIP versions (e.g. 3 base36 chars)\nexport const WIP_WORKSPACE_TAG_LENGTH = 3;\n\n\n\n/**\n * Extract the stable base (major.minor.patch) portion of a version string.\n *\n * - For any valid semver (including pre-releases like \"1.2.3-000fz8.a3k\"\n * or legacy \"1.2.3-wip.abc\"), this returns \"1.2.3\".\n * - For non-semver strings, it returns the portion before the first \"-\"\n * (if any), otherwise the input unchanged.\n */\nexport function extractBaseVersion(version: string): string {\n const hyphenIndex = version.indexOf('-');\n const candidate = hyphenIndex === -1 ? version : version.slice(0, hyphenIndex);\n\n const parsed = semver.parse(candidate);\n if (parsed) {\n return `${parsed.major}.${parsed.minor}.${parsed.patch}`;\n }\n return candidate;\n}\n\nconst TIMESTAMP_ALPHABET = '0123456789abcdefghijklmnopqrstuvwxyz';\n\n/**\n * Encode a non-negative integer into a fixed-length base36-like string.\n * Values that exceed the requested length are truncated from the left (highest order digits).\n */\nexport function encodeBase62(value: number, length: number = WIP_TIMESTAMP_TOKEN_LENGTH): string {\n if (!Number.isFinite(value) || value < 0) {\n value = 0;\n }\n\n const base = TIMESTAMP_ALPHABET.length;\n let remaining = Math.floor(value);\n let encoded = '';\n\n do {\n const digit = remaining % base;\n encoded = `${TIMESTAMP_ALPHABET[digit]}${encoded}`;\n remaining = Math.floor(remaining / base);\n } while (remaining > 0);\n\n if (encoded.length < length) {\n encoded = encoded.padStart(length, '0');\n } else if (encoded.length > length) {\n encoded = encoded.slice(-length);\n }\n\n return encoded;\n}\n\n/**\n * Generate a 6-character base36 timestamp from the provided Date (defaults to now).\n */\nexport function generateBase62Timestamp(\n 
date: Date = new Date(),\n length: number = WIP_TIMESTAMP_TOKEN_LENGTH\n): string {\n const seconds = Math.floor(date.getTime() / 1000);\n return encodeBase62(seconds, length);\n}\n\n/**\n * Create a deterministic hash for the current workspace path.\n * Returns a lower-case hex slice (default 8 characters).\n */\nexport function createWorkspaceHash(\n inputPath: string,\n length: number = WORKSPACE_HASH_TOKEN_LENGTH\n): string {\n const normalizedPath = inputPath.replace(/\\\\/g, '/');\n const digest = createHash('sha256').update(normalizedPath).digest('hex');\n if (length <= 0) {\n return '';\n }\n if (digest.length <= length) {\n return digest.padEnd(length, '0');\n }\n return digest.slice(0, length);\n}\n\n/**\n * Sanitize a workspace hash string to a fixed length.\n * Internal helper used for tag generation.\n */\nfunction sanitizeWorkspaceHash(\n hash: string,\n length: number\n): string {\n const cleaned = (hash || '').toLowerCase().replace(/[^0-9a-z]/g, '');\n if (cleaned.length === 0) {\n return ''.padEnd(length, '0');\n }\n if (cleaned.length >= length) {\n return cleaned.slice(0, length);\n }\n return `${cleaned}${'0'.repeat(length - cleaned.length)}`;\n}\n\n/**\n * Create the workspace tag used in WIP versions.\n * Returns a 3-character tag derived from the workspace path hash.\n * This is the single source of truth for workspace tags used in version strings.\n */\nexport function createWorkspaceTag(inputPath: string): string {\n const workspaceHash = createWorkspaceHash(inputPath);\n return sanitizeWorkspaceHash(workspaceHash, WIP_WORKSPACE_TAG_LENGTH);\n}\n\n/**\n * Generate a WIP version string in the canonical S-<t>.<w> form.\n *\n * - `stable` is the normalized base stable version (e.g. 
\"1.2.3\").\n * - `workspacePath` is the workspace path; the tag is derived from it.\n * - `options.now` can be provided for deterministic testing.\n */\nexport function generateWipVersion(\n stable: string,\n workspacePath: string,\n options?: { now?: Date }\n): string {\n const timestampPart = generateBase62Timestamp(\n options?.now ?? new Date(),\n WIP_TIMESTAMP_TOKEN_LENGTH\n );\n const hashPart = createWorkspaceTag(workspacePath);\n return `${stable}-${timestampPart}.${hashPart}`;\n}\n\nexport interface ParsedWipVersion {\n baseStable: string;\n timestamp: string;\n workspaceHash: string;\n}\n\n/**\n * Parse a WIP version string.\n *\n * Supports the S-<t>.<w> scheme: {base}-{timestamp}.{workspaceTag} (e.g. 1.2.3-000fz8.a3k)\n */\nexport function parseWipVersion(version: string): ParsedWipVersion | null {\n const parsed = semver.parse(version);\n if (parsed && parsed.prerelease.length === 2) {\n const [timestamp, workspaceHash] = parsed.prerelease;\n if (typeof timestamp === 'string' && typeof workspaceHash === 'string') {\n return {\n baseStable: `${parsed.major}.${parsed.minor}.${parsed.patch}`,\n timestamp,\n workspaceHash\n };\n }\n }\n\n return null;\n}\n\nexport function extractWorkspaceHashFromVersion(version: string): string | null {\n const parsed = parseWipVersion(version);\n return parsed?.workspaceHash ?? 
null;\n}\n\n", "import * as semver from 'semver';\nimport yaml from 'js-yaml';\nimport { PackageFile, PackageYml } from '../types/index.js';\nimport { extractBaseVersion } from '../utils/version-generator.js';\nimport { getPackageVersionPath } from './directory.js';\nimport { exists } from '../utils/fs.js';\nimport { FILE_PATTERNS, UNVERSIONED } from '../constants/index.js';\nimport { isScopedName } from '../utils/package-name.js';\n\n/**\n * Compute stable version from a prerelease version\n * Example: \"1.2.3-dev.abc123\" -> \"1.2.3\"\n */\nexport function computeStableVersion(version: string): string {\n const parsed = semver.parse(version);\n if (parsed) {\n return `${parsed.major}.${parsed.minor}.${parsed.patch}`;\n }\n return extractBaseVersion(version);\n}\n\n/**\n * Dump YAML with proper quoting for scoped names (e.g., @scope/name)\n */\nexport function dumpYamlWithScopedQuoting(config: PackageYml, options: yaml.DumpOptions = {}): string {\n let dumped = yaml.dump(config, { flowLevel: 1, ...options, quotingType: '\"' });\n \n // Ensure scoped names are quoted\n if (isScopedName(config.name)) {\n const lines = dumped.split('\\n');\n for (let i = 0; i < lines.length; i++) {\n if (lines[i].trim().startsWith('name:')) {\n const valueMatch = lines[i].match(/name:\\s*(.+)$/);\n if (valueMatch) {\n const value = valueMatch[1].trim();\n if (!value.startsWith('\"') && !value.startsWith(\"'\")) {\n lines[i] = lines[i].replace(/name:\\s*(.+)$/, `name: \"${config.name}\"`);\n }\n }\n break;\n }\n }\n dumped = lines.join('\\n');\n }\n \n return dumped;\n}\n\n/**\n * Transform package files for version change only (no name change)\n * Updates openpackage.yml version field\n */\nexport function transformPackageFilesForVersionChange(\n files: PackageFile[],\n newVersion: string,\n packageName: string\n): PackageFile[] {\n return files.map((file) => {\n if (file.path === FILE_PATTERNS.OPENPACKAGE_YML) {\n try {\n const parsed = yaml.load(file.content) as PackageYml;\n const 
updated: PackageYml = {\n ...parsed,\n version: newVersion\n };\n const dumped = dumpYamlWithScopedQuoting(updated, { lineWidth: 120 });\n return { ...file, content: dumped };\n } catch {\n // Fallback: minimal rewrite if parsing fails\n const fallback: PackageYml = {\n name: packageName,\n version: newVersion\n };\n const dumped = dumpYamlWithScopedQuoting(fallback, { lineWidth: 120 });\n return { ...file, content: dumped };\n }\n }\n return file;\n });\n}\n\n/**\n * Check if a package version already exists\n */\nexport async function packageVersionExists(packageName: string, version?: string): Promise<boolean> {\n const targetPath = getPackageVersionPath(packageName, version ?? UNVERSIONED);\n return await exists(targetPath);\n}\n\n/**\n * Returns true when a version is absent or explicitly marked as unversioned.\n */\nexport function isUnversionedVersion(version?: string | null): boolean {\n return version === undefined || version === null || version === UNVERSIONED;\n}\n\n/**\n * Normalizes a version string for display/logging.\n */\nexport function formatVersionLabel(version?: string | null): string {\n return isUnversionedVersion(version) ? 
UNVERSIONED : (version as string);\n}\n\n/**\n * Filter a list of versions down to semver-valid stable releases.\n */\nexport function filterStableVersions(versions: string[]): string[] {\n return versions.filter((version) => semver.valid(version) && !semver.prerelease(version));\n}\n\n/**\n * Find the latest stable version from a list (returns null if none).\n */\nexport function getLatestStableVersion(versions: string[]): string | null {\n const stableVersions = filterStableVersions(versions);\n if (stableVersions.length === 0) {\n return null;\n }\n return semver.rsort(stableVersions)[0];\n}\n", "import { join, relative, dirname, basename } from 'path';\nimport { isJunk } from 'junk';\nimport { Package, PackageFile } from '../types/index.js';\nimport {\n exists,\n walkFiles,\n readTextFile,\n writeTextFile,\n remove,\n ensureDir\n} from '../utils/fs.js';\nimport { PACKAGE_BOUNDARY_DIRS } from '../constants/workspace.js';\nimport { logger } from '../utils/logger.js';\nimport {\n PackageNotFoundError,\n InvalidPackageError,\n} from '../utils/errors.js';\nimport { validatePackageName } from '../utils/package-name.js';\nimport {\n getPackagePath,\n getPackageVersionPath,\n getLatestPackageVersion,\n listPackageVersions\n} from './directory.js';\nimport { parsePackageYml, writePackageYml } from '../utils/package-yml.js';\nimport {\n resolveVersionRange,\n isExactVersion\n} from '../utils/version-ranges.js';\nimport { PACKAGE_PATHS, UNVERSIONED } from '../constants/index.js';\nimport { getTransformedPlugin } from './install/plugin-transformer.js';\nimport { loadPackageFromPath } from './install/path-package-loader.js';\n\n/**\n * Package management operations\n */\n\nexport interface PackageVersionState {\n exists: boolean;\n isPartial: boolean;\n paths: string[];\n}\n\ninterface PackageSaveOptions {\n partial?: boolean;\n}\n\nexport class PackageManager {\n \n \n /**\n * Load a package from the registry (latest version by default)\n */\n async loadPackage(\n 
packageName: string,\n version?: string,\n opts?: { packageRootDir?: string }\n ): Promise<Package> {\n logger.debug(`Loading package: ${packageName}`, { version });\n \n validatePackageName(packageName);\n \n // Check if this is a cached transformed plugin\n if (version) {\n const cachedPluginWithContext = getTransformedPlugin(packageName, version);\n if (cachedPluginWithContext) {\n logger.debug(`Using cached transformed plugin: ${packageName}@${version}`);\n return cachedPluginWithContext.package;\n }\n }\n \n let targetVersion: string | null = opts?.packageRootDir ? version ?? null : null;\n \n if (opts?.packageRootDir) {\n // Use provided root; version comes from manifest.\n } else if (version === UNVERSIONED) {\n targetVersion = UNVERSIONED;\n } else if (version) {\n // Check if it's a version range or exact version\n if (isExactVersion(version)) {\n targetVersion = version;\n } else {\n // It's a version range - resolve it to a specific version\n const availableVersions = await listPackageVersions(packageName);\n if (availableVersions.length === 0) {\n throw new PackageNotFoundError(packageName);\n }\n \n targetVersion = resolveVersionRange(version, availableVersions);\n if (!targetVersion) {\n throw new PackageNotFoundError(\n `No version of '${packageName}' satisfies range '${version}'. Available versions: ${availableVersions.join(', ')}`\n );\n }\n logger.debug(`Resolved version range '${version}' to '${targetVersion}' for package '${packageName}'`);\n }\n } else {\n // No version specified - get latest\n targetVersion = await getLatestPackageVersion(packageName);\n }\n \n if (!targetVersion && !opts?.packageRootDir) {\n throw new PackageNotFoundError(packageName);\n }\n \n const packagePath = opts?.packageRootDir\n ? opts.packageRootDir\n : getPackageVersionPath(packageName, targetVersion ?? 
undefined);\n if (!(await exists(packagePath))) {\n throw new PackageNotFoundError(packageName);\n }\n \n try {\n // Use unified loader to handle both regular packages and plugins\n const pkg = await loadPackageFromPath(packagePath, { packageName });\n return pkg;\n } catch (error) {\n if (error instanceof PackageNotFoundError || (error as any).name === 'ValidationError') {\n throw new PackageNotFoundError(packageName);\n }\n logger.error(`Failed to load package: ${packageName}`, { error });\n throw new InvalidPackageError(`Failed to load package: ${error}`);\n }\n }\n \n /**\n * Save a package to the registry (versioned)\n */\n async savePackage(pkg: Package, options: PackageSaveOptions = {}): Promise<void> {\n const { metadata, files } = pkg;\n const packagePath = getPackageVersionPath(metadata.name, metadata.version);\n \n logger.debug(`Saving package: ${metadata.name}@${metadata.version ?? UNVERSIONED}`, { packagePath });\n \n try {\n // Ensure the version directory exists\n await ensureDir(packagePath);\n \n // Save files\n for (const file of files) {\n const fullPath = join(packagePath, file.path);\n await ensureDir(dirname(fullPath));\n await writeTextFile(fullPath, file.content, (file.encoding as BufferEncoding) || 'utf8');\n }\n if (options.partial) {\n await this.markPartialInManifest(packagePath);\n } else {\n await this.clearPartialInManifest(packagePath);\n }\n \n logger.info(`Package '${metadata.name}@${metadata.version}' saved successfully`);\n } catch (error) {\n logger.error(`Failed to save package: ${metadata.name}@${metadata.version}`, { error });\n throw new InvalidPackageError(`Failed to save package: ${error}`);\n }\n }\n \n /**\n * Delete a specific version of a package\n */\n async deletePackageVersion(packageName: string, version: string): Promise<void> {\n logger.info(`Deleting package version: ${packageName}@${version}`);\n \n validatePackageName(packageName);\n \n const packagePath = getPackageVersionPath(packageName, version);\n \n if 
(!(await exists(packagePath))) {\n throw new PackageNotFoundError(`${packageName}@${version}`);\n }\n \n try {\n await remove(packagePath);\n logger.info(`Package version '${packageName}@${version}' deleted successfully`);\n } catch (error) {\n logger.error(`Failed to delete package version: ${packageName}@${version}`, { error });\n throw new InvalidPackageError(`Failed to delete package version: ${error}`);\n }\n }\n \n /**\n * Delete all versions of a package\n */\n async deletePackage(packageName: string): Promise<void> {\n logger.info(`Deleting all versions of package: ${packageName}`);\n \n validatePackageName(packageName);\n \n const packagePath = getPackagePath(packageName);\n \n if (!(await exists(packagePath))) {\n throw new PackageNotFoundError(packageName);\n }\n \n try {\n await remove(packagePath);\n logger.info(`All versions of package '${packageName}' deleted successfully`);\n } catch (error) {\n logger.error(`Failed to delete package: ${packageName}`, { error });\n throw new InvalidPackageError(`Failed to delete package: ${error}`);\n }\n }\n \n /**\n * Check if a package exists in the registry (any version)\n */\n async packageExists(packageName: string): Promise<boolean> {\n validatePackageName(packageName);\n const latestVersion = await getLatestPackageVersion(packageName);\n return latestVersion !== null;\n }\n\n /**\n * Return local state for a specific package version, including partial metadata.\n */\n async getPackageVersionState(packageName: string, version?: string): Promise<PackageVersionState> {\n const packagePath = getPackageVersionPath(packageName, version);\n const existsLocally = await exists(packagePath);\n\n if (!existsLocally) {\n return { exists: false, isPartial: false, paths: [] };\n }\n\n const manifestPath = join(packagePath, PACKAGE_PATHS.MANIFEST_RELATIVE);\n const manifestExists = await exists(manifestPath);\n let isPartial = !manifestExists;\n if (manifestExists) {\n try {\n const manifest = await 
parsePackageYml(manifestPath);\n isPartial = Boolean((manifest as any).partial);\n } catch (error) {\n logger.warn('Failed to read package manifest for partial state', { packageName, version, error });\n isPartial = true;\n }\n }\n\n const paths = await this.listPackageFilePaths(packagePath);\n\n return { exists: true, isPartial, paths };\n }\n \n /**\n * Discover all files in a package directory\n */\n\n private async discoverPackageFiles(packagePath: string): Promise<PackageFile[]> {\n const files: PackageFile[] = [];\n\n try {\n // Include all file types (no filtering)\n // Get all files recursively in the package directory\n for await (const fullPath of walkFiles(packagePath, [], { excludeDirs: PACKAGE_BOUNDARY_DIRS })) {\n const relativePath = relative(packagePath, fullPath);\n\n // Filter out junk files\n if (isJunk(basename(relativePath))) {\n continue;\n }\n\n const content = await readTextFile(fullPath);\n\n files.push({\n path: relativePath,\n content,\n encoding: 'utf8'\n });\n }\n\n logger.debug(`Discovered ${files.length} files in package directory`, { packagePath });\n return files;\n } catch (error) {\n logger.error(`Failed to discover files in package directory: ${packagePath}`, { error });\n throw new InvalidPackageError(`Failed to discover package files: ${error}`);\n }\n }\n\n private async markPartialInManifest(packagePath: string): Promise<void> {\n const manifestPath = join(packagePath, PACKAGE_PATHS.MANIFEST_RELATIVE);\n if (!(await exists(manifestPath))) {\n return;\n }\n\n const manifest = await parsePackageYml(manifestPath);\n (manifest as any).partial = true;\n await writePackageYml(manifestPath, manifest);\n }\n\n private async clearPartialInManifest(packagePath: string): Promise<void> {\n const manifestPath = join(packagePath, PACKAGE_PATHS.MANIFEST_RELATIVE);\n if (!(await exists(manifestPath))) {\n return;\n }\n\n const manifest = await parsePackageYml(manifestPath);\n if ((manifest as any).partial !== undefined) {\n delete (manifest 
as any).partial;\n await writePackageYml(manifestPath, manifest);\n }\n }\n\n private async listPackageFilePaths(packagePath: string): Promise<string[]> {\n const paths: string[] = [];\n for await (const fullPath of walkFiles(packagePath, [], { excludeDirs: PACKAGE_BOUNDARY_DIRS })) {\n const relativePath = relative(packagePath, fullPath);\n if (isJunk(basename(relativePath))) {\n continue;\n }\n paths.push(relativePath);\n }\n paths.sort();\n return paths;\n }\n}\n\n// Create and export a singleton instance\nexport const packageManager = new PackageManager();\n\n", "import { join, relative } from 'path';\nimport { readTextFile, walkFiles } from '../../utils/fs.js';\nimport { logger } from '../../utils/logger.js';\nimport { ValidationError } from '../../utils/errors.js';\nimport { isJunk } from 'junk';\nimport type { Package, PackageFile, PackageYml, PackageWithContext } from '../../types/index.js';\nimport { detectPackageFormat } from './format-detector.js';\nimport { DIR_PATTERNS } from '../../constants/index.js';\nimport { PACKAGE_BOUNDARY_DIRS } from '../../constants/workspace.js';\nimport { generateGitHubPackageName } from '../../utils/plugin-naming.js';\nimport { createPlatformContext } from '../conversion-context/index.js';\nimport { resolvePluginMetadata, type ClaudePluginManifest } from './plugin-metadata-resolver.js';\nimport type { MarketplacePluginEntry } from './marketplace-handler.js';\n\n/**\n * In-memory cache for transformed plugin packages with context.\n * Key: `${packageName}@${version}`\n */\nconst transformedPluginCache = new Map<string, PackageWithContext>();\n\n/**\n * Cache a transformed plugin package with context for later retrieval.\n */\nfunction cacheTransformedPlugin(pkg: Package, context?: any): void {\n const key = `${pkg.metadata.name}@${pkg.metadata.version}`;\n const cached: PackageWithContext = context \n ? 
{ package: pkg, context }\n : { package: pkg, context: createPlatformContext('claude-plugin', 1.0) };\n transformedPluginCache.set(key, cached);\n}\n\n/**\n * Retrieve a cached transformed plugin package with context.\n */\nexport function getTransformedPlugin(name: string, version: string): PackageWithContext | undefined {\n const key = `${name}@${version}`;\n return transformedPluginCache.get(key);\n}\n\n/**\n * Context for transforming a plugin with naming information.\n */\ninterface PluginTransformContext {\n gitUrl?: string;\n path?: string;\n resourcePath?: string;\n repoPath?: string;\n marketplaceEntry?: MarketplacePluginEntry;\n}\n\n/**\n * Transform a Claude Code plugin to an OpenPackage Package with conversion context.\n * \n * Resolves plugin metadata from plugin.json or marketplace entry, converts it to\n * OpenPackage format, and collects all plugin files.\n * \n * @param pluginDir - Absolute path to plugin directory\n * @param context - Optional context for scoped naming and marketplace entry\n * @returns Package object with conversion context\n */\nexport async function transformPluginToPackage(\n pluginDir: string,\n context?: PluginTransformContext\n): Promise<PackageWithContext> {\n // Resolve plugin metadata from plugin.json or marketplace entry\n const resolved = await resolvePluginMetadata(pluginDir, context?.marketplaceEntry);\n const pluginManifest = resolved.manifest;\n \n // Generate scoped name using consistent naming logic\n // Always generate the name (no override) to ensure consistency\n const packageName = generateGitHubPackageName({\n gitUrl: context?.gitUrl,\n path: context?.path,\n resourcePath: context?.resourcePath,\n packageName: pluginManifest.name,\n repoPath: context?.repoPath\n });\n \n // Transform to OpenPackage metadata\n const metadata: PackageYml = {\n name: packageName,\n // Claude Code plugins often omit version; normalize to a concrete value so:\n // - logs/install output are consistent\n // - transformed plugin 
cache keys remain stable\n version: pluginManifest.version?.trim() || '0.0.0',\n description: pluginManifest.description,\n keywords: pluginManifest.keywords,\n license: pluginManifest.license,\n homepage: pluginManifest.homepage\n };\n \n // Extract author name\n if (pluginManifest.author?.name) {\n metadata.author = pluginManifest.author.name;\n }\n \n // Extract repository - handle both string and object forms\n if (pluginManifest.repository) {\n if (typeof pluginManifest.repository === 'string') {\n metadata.repository = {\n type: 'git',\n url: pluginManifest.repository\n };\n } else if (pluginManifest.repository.url) {\n metadata.repository = {\n type: pluginManifest.repository.type || 'git',\n url: pluginManifest.repository.url\n };\n }\n }\n \n // Collect all plugin files (preserve entire directory structure)\n const files = await extractPluginFiles(pluginDir);\n \n // Detect package format\n // Claude plugins are detected as platform-specific 'claude-plugin' format\n // and will use the claude-plugin flows defined in platforms.jsonc\n const format = detectPackageFormat(files);\n \n const pkg: Package = {\n metadata,\n files,\n // Store format metadata for installation pipeline\n // Force platform to 'claude-plugin' since detectPackageFormat sees 'universal'\n // (.claude-plugin/ is stripped from files), but we know the true source format.\n _format: {\n ...format,\n type: 'platform-specific' as const,\n platform: 'claude-plugin',\n }\n };\n \n // Create conversion context for claude-plugin\n const conversionContext = createPlatformContext('claude-plugin', format.confidence);\n \n // Cache the transformed plugin with context for later retrieval\n cacheTransformedPlugin(pkg, conversionContext);\n \n logger.info('Transformed Claude Code plugin', {\n name: metadata.name,\n version: metadata.version,\n fileCount: files.length,\n format: format.type,\n platform: format.platform,\n confidence: format.confidence\n });\n \n return { package: pkg, context: 
conversionContext };\n}\n\n/**\n * Extract all files from a plugin directory, preserving structure.\n * \n * Plugin files are kept with their original paths (commands/, agents/, etc.)\n * The OpenPackage platform system will handle installing them to the correct\n * platform-specific directories (.claude/commands/, .cursor/commands/, etc.)\n * \n * Special handling for plugin-specific files:\n * - .claude-plugin/ \u2192 skipped (plugin metadata, not needed in workspace)\n * - .mcp.json, .lsp.json \u2192 kept as root files\n * - commands/, agents/, skills/, hooks/ \u2192 universal subdirs\n * \n * @param pluginDir - Absolute path to plugin directory\n * @returns Array of package files with original paths\n */\nasync function extractPluginFiles(pluginDir: string): Promise<PackageFile[]> {\n const files: PackageFile[] = [];\n \n try {\n for await (const fullPath of walkFiles(pluginDir, [], { excludeDirs: PACKAGE_BOUNDARY_DIRS })) {\n const relativePath = relative(pluginDir, fullPath);\n \n // Skip junk files (e.g., .DS_Store, Thumbs.db)\n const pathParts = relativePath.split('/');\n if (pathParts.some(part => isJunk(part))) {\n continue;\n }\n \n // Skip git metadata\n if (relativePath.startsWith('.git/') || relativePath === '.git') {\n continue;\n }\n \n // Skip .claude-plugin directory (plugin metadata, not needed in workspace)\n if (relativePath.startsWith(`${DIR_PATTERNS.CLAUDE_PLUGIN}/`)) {\n continue;\n }\n \n const content = await readTextFile(fullPath);\n \n files.push({\n path: relativePath,\n content,\n encoding: 'utf8'\n });\n }\n \n return files;\n \n } catch (error) {\n throw new ValidationError(\n `Failed to extract files from plugin directory ${pluginDir}: ${error}`\n );\n }\n}\n\n\n", "/**\n * Conversion Context Creation\n * \n * Functions for creating PackageConversionContext instances from various sources.\n */\n\nimport type { \n PackageConversionContext, \n FormatIdentity,\n FormatState \n} from '../../types/conversion-context.js';\nimport type { 
Platform } from '../platforms.js';\nimport type { PackageFormat } from '../install/format-detector.js';\nimport type { Package } from '../../types/index.js';\nimport { detectPackageFormat } from '../install/format-detector.js';\nimport { logger } from '../../utils/logger.js';\n\n/**\n * Create context from detected package format\n * \n * Use this at package discovery/loading time when format is detected.\n */\nexport function createContextFromFormat(format: PackageFormat): PackageConversionContext {\n const now = new Date();\n \n const originalFormat: FormatIdentity = {\n type: format.type,\n platform: format.platform,\n detectedAt: now,\n confidence: format.confidence\n };\n \n const currentFormat: FormatState = {\n type: format.type,\n platform: format.platform\n };\n \n logger.debug('Created conversion context from format', {\n type: format.type,\n platform: format.platform,\n confidence: format.confidence\n });\n \n return {\n originalFormat,\n currentFormat,\n conversionHistory: [],\n targetPlatform: undefined\n };\n}\n\n/**\n * Create context from package (detects format first)\n * \n * Use this when loading a package without prior format information.\n * Fallback for backward compatibility during migration.\n */\nexport function createContextFromPackage(pkg: Package): PackageConversionContext {\n // Use existing _format if available (for backward compatibility)\n const format = pkg._format || detectPackageFormat(pkg.files);\n \n logger.debug('Creating context from package', {\n name: pkg.metadata.name,\n hasFormat: !!pkg._format,\n detectedType: format.type,\n detectedPlatform: format.platform\n });\n \n return createContextFromFormat(format);\n}\n\n/**\n * Create context for a known platform-specific package\n * \n * Use this when you know definitively what platform a package is for\n * (e.g., when transforming a claude-plugin).\n */\nexport function createPlatformContext(\n platform: Platform,\n confidence: number = 1.0\n): PackageConversionContext {\n 
const now = new Date();\n \n const originalFormat: FormatIdentity = {\n type: 'platform-specific',\n platform,\n detectedAt: now,\n confidence\n };\n \n const currentFormat: FormatState = {\n type: 'platform-specific',\n platform\n };\n \n logger.debug('Created platform-specific context', { platform, confidence });\n \n return {\n originalFormat,\n currentFormat,\n conversionHistory: [],\n targetPlatform: undefined\n };\n}\n\n/**\n * Create context for universal format package\n * \n * Use this when loading a package that's already in universal format.\n */\nexport function createUniversalContext(\n confidence: number = 1.0\n): PackageConversionContext {\n const now = new Date();\n \n const originalFormat: FormatIdentity = {\n type: 'universal',\n platform: undefined,\n detectedAt: now,\n confidence\n };\n \n const currentFormat: FormatState = {\n type: 'universal',\n platform: undefined\n };\n \n logger.debug('Created universal format context', { confidence });\n \n return {\n originalFormat,\n currentFormat,\n conversionHistory: [],\n targetPlatform: undefined\n };\n}\n\n/**\n * Update context with target platform\n * \n * Sets the target platform for the current operation.\n * Returns a new context object (does not mutate).\n */\nexport function withTargetPlatform(\n context: PackageConversionContext,\n targetPlatform: Platform\n): PackageConversionContext {\n return {\n ...context,\n targetPlatform\n };\n}\n\n/**\n * Update context after successful conversion\n * \n * Records the conversion in history and updates current format.\n * Returns a new context object (does not mutate).\n */\nexport function updateContextAfterConversion(\n context: PackageConversionContext,\n newFormat: FormatState,\n targetPlatform: Platform\n): PackageConversionContext {\n const timestamp = new Date();\n \n const newHistory = [\n ...context.conversionHistory,\n {\n from: context.currentFormat,\n to: newFormat,\n targetPlatform,\n timestamp\n }\n ];\n \n logger.debug('Updated context 
after conversion', {\n from: {\n type: context.currentFormat.type,\n platform: context.currentFormat.platform\n },\n to: {\n type: newFormat.type,\n platform: newFormat.platform\n },\n targetPlatform,\n totalConversions: newHistory.length\n });\n \n return {\n ...context,\n currentFormat: newFormat,\n conversionHistory: newHistory,\n targetPlatform\n };\n}\n", "/**\n * Conversion Context Serialization\n * \n * Functions for serializing/deserializing context to/from JSON for temp directory persistence.\n */\n\nimport type { \n PackageConversionContext, \n SerializedConversionContext,\n ContextSerializationOptions \n} from '../../types/conversion-context.js';\nimport type { Platform } from '../platforms.js';\nimport { logger } from '../../utils/logger.js';\nimport { validateContext } from './validation.js';\n\n/**\n * Serialize context to JSON-friendly format\n * \n * Converts Date objects to ISO strings for JSON serialization.\n */\nexport function serializeContext(\n context: PackageConversionContext,\n options?: ContextSerializationOptions\n): SerializedConversionContext {\n const includeHistory = options?.includeHistory ?? true;\n \n return {\n originalFormat: {\n type: context.originalFormat.type,\n platform: context.originalFormat.platform,\n detectedAt: context.originalFormat.detectedAt.toISOString(),\n confidence: context.originalFormat.confidence\n },\n currentFormat: {\n type: context.currentFormat.type,\n platform: context.currentFormat.platform\n },\n conversionHistory: includeHistory ? 
context.conversionHistory.map(record => ({\n from: {\n type: record.from.type,\n platform: record.from.platform\n },\n to: {\n type: record.to.type,\n platform: record.to.platform\n },\n targetPlatform: record.targetPlatform,\n timestamp: record.timestamp.toISOString()\n })) : [],\n targetPlatform: context.targetPlatform\n };\n}\n\n/**\n * Deserialize context from JSON format\n * \n * Converts ISO strings back to Date objects.\n * Validates the restored context.\n */\nexport function deserializeContext(\n serialized: SerializedConversionContext\n): PackageConversionContext {\n const context: PackageConversionContext = {\n originalFormat: {\n type: serialized.originalFormat.type,\n platform: serialized.originalFormat.platform as Platform | undefined,\n detectedAt: new Date(serialized.originalFormat.detectedAt),\n confidence: serialized.originalFormat.confidence\n },\n currentFormat: {\n type: serialized.currentFormat.type,\n platform: serialized.currentFormat.platform as Platform | undefined\n },\n conversionHistory: serialized.conversionHistory.map(record => ({\n from: {\n type: record.from.type,\n platform: record.from.platform as Platform | undefined\n },\n to: {\n type: record.to.type,\n platform: record.to.platform as Platform | undefined\n },\n targetPlatform: record.targetPlatform as Platform,\n timestamp: new Date(record.timestamp)\n })),\n targetPlatform: serialized.targetPlatform as Platform | undefined\n };\n \n // Validate restored context\n try {\n validateContext(context);\n } catch (error) {\n logger.error('Deserialized context failed validation', { error });\n throw error;\n }\n \n return context;\n}\n\n/**\n * Serialize context to JSON string\n * \n * Convenience function for writing to files.\n */\nexport function contextToJSON(\n context: PackageConversionContext,\n options?: ContextSerializationOptions\n): string {\n const serialized = serializeContext(context, options);\n const pretty = options?.pretty ?? 
true;\n \n return JSON.stringify(serialized, null, pretty ? 2 : 0);\n}\n\n/**\n * Deserialize context from JSON string\n * \n * Convenience function for reading from files.\n */\nexport function contextFromJSON(json: string): PackageConversionContext {\n try {\n const serialized = JSON.parse(json) as SerializedConversionContext;\n return deserializeContext(serialized);\n } catch (error) {\n logger.error('Failed to parse context JSON', { error });\n throw new Error(`Invalid context JSON: ${(error as Error).message}`);\n }\n}\n\n/**\n * Create a human-readable description of context\n * \n * Useful for logging and debugging.\n */\nexport function describeContext(context: PackageConversionContext): string {\n const lines = [\n `Original: ${context.originalFormat.platform || 'universal'} (detected ${context.originalFormat.detectedAt.toISOString()})`,\n `Current: ${context.currentFormat.platform || 'universal'}`,\n `Target: ${context.targetPlatform || 'none'}`,\n `Conversions: ${context.conversionHistory.length}`\n ];\n \n if (context.conversionHistory.length > 0) {\n lines.push('History:');\n context.conversionHistory.forEach((h, i) => {\n lines.push(\n ` ${i + 1}. 
${h.from.platform || 'universal'} \u2192 ${h.to.platform || 'universal'} ` +\n `(for ${h.targetPlatform}) at ${h.timestamp.toLocaleString()}`\n );\n });\n }\n \n return lines.join('\\n');\n}\n", "/**\n * Format Detector Module\n * \n * Detects package format (universal vs platform-specific) by analyzing file structure.\n * Used to determine conversion strategy during installation.\n */\n\nimport { dirname } from 'path';\nimport type { Platform } from '../platforms.js';\nimport type { PackageFile } from '../../types/index.js';\nimport type { PackageConversionContext } from '../../types/conversion-context.js';\nimport type { EnhancedPackageFormat } from './detection-types.js';\nimport { getAllPlatforms, isPlatformId } from '../platforms.js';\nimport { logger } from '../../utils/logger.js';\nimport { createContextFromFormat } from '../conversion-context/index.js';\n\n/**\n * Package format classification\n */\nexport interface PackageFormat {\n /**\n * Format type: universal (commands/, agents/) or platform-specific (.claude/, .cursor/)\n */\n type: 'universal' | 'platform-specific';\n \n /**\n * If platform-specific, which platform?\n */\n platform?: Platform;\n \n /**\n * Confidence score (0-1) based on file analysis\n */\n confidence: number;\n \n /**\n * Detailed file analysis for debugging\n */\n analysis: FormatAnalysis;\n}\n\nexport interface FormatAnalysis {\n universalFiles: number;\n platformSpecificFiles: number;\n detectedPlatforms: Map<Platform, number>; // Platform -> file count\n totalFiles: number;\n samplePaths: {\n universal: string[];\n platformSpecific: string[];\n };\n}\n\n/**\n * Known universal subdirectories in OpenPackage format\n */\nconst UNIVERSAL_SUBDIRS = [\n 'commands',\n 'agents',\n 'rules',\n 'skills',\n 'hooks'\n];\n\n/**\n * Known platform-specific root directories\n */\nconst PLATFORM_ROOT_DIRS: Record<string, Platform> = {\n '.claude': 'claude',\n '.claude-plugin': 'claude-plugin',\n '.cursor': 'cursor',\n '.opencode': 
'opencode',\n '.codex': 'codex',\n '.factory': 'factory',\n '.kilocode': 'kilo',\n '.kiro': 'kiro',\n '.qwen': 'qwen',\n '.roo': 'roo',\n '.warp': 'warp',\n '.windsurf': 'windsurf',\n '.augment': 'augment',\n '.agent': 'antigravity'\n};\n\n/**\n * Detect package format from file list\n */\nexport function detectPackageFormat(files: PackageFile[]): PackageFormat {\n // Check for claude-plugin first (highest priority)\n const hasClaudePluginManifest = files.some(f => \n f.path === '.claude-plugin/plugin.json'\n );\n \n if (hasClaudePluginManifest) {\n return {\n type: 'platform-specific',\n platform: 'claude-plugin',\n confidence: 1.0,\n analysis: {\n universalFiles: 0,\n platformSpecificFiles: files.length,\n detectedPlatforms: new Map([['claude-plugin', files.length]]),\n totalFiles: files.length,\n samplePaths: {\n universal: [],\n platformSpecific: ['.claude-plugin/plugin.json']\n }\n }\n };\n }\n \n const analysis: FormatAnalysis = {\n universalFiles: 0,\n platformSpecificFiles: 0,\n detectedPlatforms: new Map(),\n totalFiles: files.length,\n samplePaths: {\n universal: [],\n platformSpecific: []\n }\n };\n \n // Analyze each file\n for (const file of files) {\n const classification = classifyFile(file.path);\n \n if (classification.type === 'universal') {\n analysis.universalFiles++;\n if (analysis.samplePaths.universal.length < 5) {\n analysis.samplePaths.universal.push(file.path);\n }\n } else if (classification.type === 'platform-specific' && classification.platform) {\n analysis.platformSpecificFiles++;\n const count = analysis.detectedPlatforms.get(classification.platform) || 0;\n analysis.detectedPlatforms.set(classification.platform, count + 1);\n \n if (analysis.samplePaths.platformSpecific.length < 5) {\n analysis.samplePaths.platformSpecific.push(file.path);\n }\n }\n }\n \n // Determine format based on analysis\n return determineFormat(analysis);\n}\n\n/**\n * Classify a single file path\n */\nfunction classifyFile(path: string): {\n type: 
'universal' | 'platform-specific' | 'other';\n platform?: Platform;\n} {\n const parts = path.split('/');\n const firstPart = parts[0];\n \n // Check for platform-specific root directory\n if (firstPart in PLATFORM_ROOT_DIRS) {\n return {\n type: 'platform-specific',\n platform: PLATFORM_ROOT_DIRS[firstPart]\n };\n }\n \n // Check for platform suffix in filename BEFORE universal subdir\n // (e.g. agents/git/foo.opencode.md is platform-specific, not universal)\n const platformSuffix = extractPlatformSuffixFromPath(path);\n if (platformSuffix) {\n return {\n type: 'platform-specific',\n platform: platformSuffix\n };\n }\n \n // Check for universal subdirectory\n if (UNIVERSAL_SUBDIRS.includes(firstPart)) {\n return { type: 'universal' };\n }\n \n // Root-level files or other directories\n return { type: 'other' };\n}\n\n/**\n * Extract platform suffix from filename (e.g., \"mcp.claude.jsonc\" -> \"claude\")\n */\nfunction extractPlatformSuffixFromPath(path: string): Platform | null {\n const parts = path.split('/');\n const filename = parts[parts.length - 1];\n const nameParts = filename.split('.');\n \n // Need at least 3 parts: name.platform.ext\n if (nameParts.length >= 3) {\n const possiblePlatform = nameParts[nameParts.length - 2];\n if (isPlatformId(possiblePlatform)) {\n return possiblePlatform as Platform;\n }\n }\n \n return null;\n}\n\n/**\n * Determine overall format from analysis\n */\nfunction determineFormat(analysis: FormatAnalysis): PackageFormat {\n const { universalFiles, platformSpecificFiles, detectedPlatforms, totalFiles } = analysis;\n \n // No files analyzed\n if (totalFiles === 0) {\n return {\n type: 'universal',\n confidence: 0,\n analysis\n };\n }\n \n // Calculate ratios\n const universalRatio = universalFiles / totalFiles;\n const platformRatio = platformSpecificFiles / totalFiles;\n \n // Strong universal signal: >70% universal files\n if (universalRatio > 0.7) {\n return {\n type: 'universal',\n confidence: universalRatio,\n analysis\n 
};\n }\n \n // Strong platform-specific signal: >70% platform files\n if (platformRatio > 0.7) {\n // Determine dominant platform\n let dominantPlatform: Platform | undefined;\n let maxCount = 0;\n \n for (const [platform, count] of detectedPlatforms) {\n if (count > maxCount) {\n maxCount = count;\n dominantPlatform = platform;\n }\n }\n \n if (dominantPlatform) {\n return {\n type: 'platform-specific',\n platform: dominantPlatform,\n confidence: platformRatio,\n analysis\n };\n }\n }\n \n // Mixed or unclear: default to universal with low confidence\n return {\n type: 'universal',\n confidence: Math.max(universalRatio, 0.3),\n analysis\n };\n}\n\n/**\n * Check if a package format indicates platform-specific content\n */\nexport function isPlatformSpecific(format: PackageFormat): boolean {\n return format.type === 'platform-specific' && format.platform !== undefined;\n}\n\n/**\n * Check if conversion is needed for target platform\n */\nexport function needsConversion(\n format: PackageFormat,\n targetPlatform: Platform\n): boolean {\n // Universal format always uses standard flows (no conversion)\n if (format.type === 'universal') {\n return false;\n }\n \n // Platform-specific: needs conversion if target differs from source\n if (format.type === 'platform-specific' && format.platform) {\n return format.platform !== targetPlatform;\n }\n \n return false;\n}\n\n/**\n * Detect package format and create conversion context\n * \n * Convenience function that combines format detection with context creation.\n * Use this at package loading time to get both format and context together.\n */\nexport function detectPackageFormatWithContext(files: PackageFile[]): {\n format: PackageFormat;\n context: PackageConversionContext;\n} {\n const format = detectPackageFormat(files);\n const context = createContextFromFormat(format);\n \n return { format, context };\n}\n\n/**\n * Enhanced Package Format Detection (Two-Tier Strategy)\n * \n * Implements comprehensive format detection 
with two tiers:\n * \n * Tier 1 (Fast Path): Package-Level Markers\n * - Checks for explicit format markers from platforms.jsonc (e.g., .claude-plugin/plugin.json)\n * - Returns immediately if clear marker found\n * - Fastest path for well-structured packages\n * \n * Tier 2 (Detailed Path): Per-File Detection\n * - Falls back when no clear markers exist\n * - Analyzes each file's frontmatter against platform schemas\n * - Groups files by detected format\n * - Determines overall package format from distribution\n * \n * @param files - List of package files (with optional content/frontmatter)\n * @param targetDir - Optional target directory for local platform config\n * @returns Enhanced package format with comprehensive analysis\n */\nexport async function detectEnhancedPackageFormat(\n files: PackageFile[],\n targetDir?: string\n): Promise<EnhancedPackageFormat> {\n // Import detection modules dynamically to avoid circular deps\n const { detectPlatformMarkers, getPrimaryPlatformFromMarkers, isPurePlatformSpecific } = \n await import('./package-marker-detector.js');\n const { detectFileFormats } = await import('./file-format-detector.js');\n const { \n analyzeFormatDistribution, \n calculatePackageConfidence,\n determinePackageFormat: determineFromDistribution,\n groupFilesByPlatform\n } = await import('./format-distribution-analyzer.js');\n \n // Tier 1: Check for package-level markers (fast path)\n const markers = detectPlatformMarkers(files, targetDir);\n \n // Pure platform-specific package with single marker\n if (isPurePlatformSpecific(markers)) {\n const primaryPlatform = getPrimaryPlatformFromMarkers(markers)!;\n \n return {\n packageFormat: primaryPlatform,\n detectionMethod: 'package-marker',\n confidence: 1.0,\n // For marker fast-path, we still need format groups so conversion can run.\n // Group everything under the detected platform; individual files can be no-ops\n // if no import flow matches (e.g. 
plugin manifests).\n formatGroups: new Map([[primaryPlatform, files.map(f => f.path)]]),\n markers: {\n matchedPatterns: markers.matches.map(m => ({\n platformId: m.platformId,\n pattern: m.matchedPattern\n })),\n hasOpenPackageYml: markers.hasOpenPackageYml,\n hasPackageYml: markers.hasPackageYml\n },\n analysis: {\n totalFiles: files.length,\n analyzedFiles: 0, // Fast path - didn't analyze files\n skippedFiles: files.length,\n formatDistribution: new Map([[primaryPlatform, files.length]])\n }\n };\n }\n \n // Pure universal package: openpackage.yml at root, no platform markers\n // Skip per-file detection; all files are already in universal format\n if ((markers.hasOpenPackageYml || markers.hasPackageYml) && markers.matches.length === 0) {\n return {\n packageFormat: 'universal',\n detectionMethod: 'package-marker',\n confidence: 1.0,\n formatGroups: new Map([['universal', files.map(f => f.path)]]),\n markers: {\n matchedPatterns: [],\n hasOpenPackageYml: markers.hasOpenPackageYml,\n hasPackageYml: markers.hasPackageYml\n },\n analysis: {\n totalFiles: files.length,\n analyzedFiles: 0,\n skippedFiles: files.length,\n formatDistribution: new Map([['universal', files.length]])\n }\n };\n }\n \n // Tier 2: Per-file detection (detailed path)\n // Detect format for each file\n const fileFormats = detectFileFormats(files, targetDir);\n \n // Analyze distribution\n const distribution = analyzeFormatDistribution(fileFormats);\n const confidence = calculatePackageConfidence(distribution, fileFormats);\n const packageFormat = determineFromDistribution(distribution);\n const formatGroups = groupFilesByPlatform(fileFormats);\n \n // Count analyzed vs skipped files\n const analyzedFiles = fileFormats.size;\n const skippedFiles = files.length - analyzedFiles;\n \n return {\n packageFormat,\n detectionMethod: 'per-file',\n confidence,\n fileFormats,\n formatGroups,\n markers: markers.matches.length > 0 ? 
{\n matchedPatterns: markers.matches.map(m => ({\n platformId: m.platformId,\n pattern: m.matchedPattern\n })),\n hasOpenPackageYml: markers.hasOpenPackageYml,\n hasPackageYml: markers.hasPackageYml\n } : undefined,\n analysis: {\n totalFiles: files.length,\n analyzedFiles,\n skippedFiles,\n formatDistribution: distribution.counts\n }\n };\n}\n", "import { join } from 'path';\nimport { exists, readTextFile } from '../../utils/fs.js';\nimport { logger } from '../../utils/logger.js';\nimport { ValidationError } from '../../utils/errors.js';\nimport { CLAUDE_PLUGIN_PATHS } from '../../constants/index.js';\nimport type { MarketplacePluginEntry } from './marketplace-handler.js';\n\n/**\n * Claude Code plugin manifest schema (from plugin.json)\n */\nexport interface ClaudePluginManifest {\n name: string;\n version?: string;\n description?: string;\n author?: {\n name?: string;\n email?: string;\n url?: string;\n };\n homepage?: string;\n repository?: string | {\n type?: string;\n url?: string;\n };\n license?: string;\n keywords?: string[];\n // Component configuration\n commands?: string | string[];\n agents?: string | string[];\n hooks?: string | object;\n mcpServers?: string | object;\n lspServers?: string | object;\n}\n\n/**\n * Metadata resolution result with source information\n */\ninterface ResolvedPluginMetadata {\n /** Resolved plugin manifest */\n manifest: ClaudePluginManifest;\n /** Source of metadata */\n source: 'plugin.json' | 'marketplace' | 'merged';\n}\n\n/**\n * Resolve plugin metadata from multiple sources with priority handling.\n * \n * Resolution logic:\n * 1. Check if plugin.json exists\n * 2. If exists AND strict !== false:\n * - Use plugin.json as primary\n * - Merge marketplace entry fields (as defaults)\n * - Return with source: 'merged'\n * 3. If exists AND strict === false:\n * - Log warning (misconfiguration - strict:false but plugin.json exists)\n * - Use plugin.json anyway (safest fallback)\n * - Return with source: 'plugin.json'\n * 4. 
If NOT exists AND strict === false:\n * - Use marketplace entry as full definition\n * - Return with source: 'marketplace'\n * 5. If NOT exists AND strict !== false:\n * - Throw error (plugin.json required)\n * \n * @param pluginDir - Absolute path to plugin directory\n * @param marketplaceEntry - Optional marketplace entry for this plugin\n * @returns Resolved metadata and source\n * @throws ValidationError if plugin.json is required but missing or invalid\n */\nexport async function resolvePluginMetadata(\n pluginDir: string,\n marketplaceEntry?: MarketplacePluginEntry\n): Promise<ResolvedPluginMetadata> {\n const manifestPath = join(pluginDir, CLAUDE_PLUGIN_PATHS.PLUGIN_MANIFEST);\n const hasPluginJson = await exists(manifestPath);\n const isStrictFalse = marketplaceEntry?.strict === false;\n \n // Case 1 & 2: plugin.json exists\n if (hasPluginJson) {\n let pluginManifest: ClaudePluginManifest;\n \n try {\n const content = await readTextFile(manifestPath);\n pluginManifest = JSON.parse(content);\n } catch (error) {\n throw new ValidationError(\n `Failed to parse plugin manifest at ${manifestPath}: ${error}`\n );\n }\n \n // Case 2: strict:false but plugin.json exists (misconfiguration)\n if (isStrictFalse) {\n logger.warn(\n `Plugin at '${pluginDir}' has strict:false in marketplace but contains plugin.json. ` +\n `Using plugin.json as primary source. 
Consider removing plugin.json or setting strict:true.`\n );\n \n return {\n manifest: pluginManifest,\n source: 'plugin.json'\n };\n }\n \n // Case 1: strict is true or undefined - merge with marketplace entry\n if (marketplaceEntry) {\n const merged = mergePluginMetadata(pluginManifest, marketplaceEntry);\n \n return {\n manifest: merged,\n source: 'merged'\n };\n }\n \n // No marketplace entry, just use plugin.json\n return {\n manifest: pluginManifest,\n source: 'plugin.json'\n };\n }\n \n // Case 4: No plugin.json, strict:false - use marketplace entry\n if (isStrictFalse && marketplaceEntry) {\n const manifest = marketplaceEntryToManifest(marketplaceEntry);\n \n return {\n manifest,\n source: 'marketplace'\n };\n }\n \n // Case 5: No plugin.json and not strict:false - error\n if (marketplaceEntry) {\n throw new ValidationError(\n `Plugin '${marketplaceEntry.name}' at '${pluginDir}' is missing plugin.json. ` +\n `Either add .claude-plugin/plugin.json or set \"strict\": false in marketplace entry.`\n );\n } else {\n throw new ValidationError(\n `Plugin at '${pluginDir}' is missing plugin.json at ${CLAUDE_PLUGIN_PATHS.PLUGIN_MANIFEST}`\n );\n }\n}\n\n/**\n * Merge plugin.json with marketplace entry.\n * Plugin.json fields take priority, marketplace entry provides defaults.\n */\nfunction mergePluginMetadata(\n pluginManifest: ClaudePluginManifest,\n marketplaceEntry: MarketplacePluginEntry\n): ClaudePluginManifest {\n return {\n name: pluginManifest.name, // Always use plugin.json name\n version: pluginManifest.version ?? marketplaceEntry.version,\n description: pluginManifest.description ?? marketplaceEntry.description,\n author: pluginManifest.author ?? marketplaceEntry.author,\n homepage: pluginManifest.homepage ?? marketplaceEntry.homepage,\n repository: pluginManifest.repository ?? marketplaceEntry.repository,\n license: pluginManifest.license ?? marketplaceEntry.license,\n keywords: pluginManifest.keywords ?? 
marketplaceEntry.keywords,\n commands: pluginManifest.commands ?? marketplaceEntry.commands,\n agents: pluginManifest.agents ?? marketplaceEntry.agents,\n hooks: pluginManifest.hooks ?? marketplaceEntry.hooks,\n mcpServers: pluginManifest.mcpServers ?? marketplaceEntry.mcpServers,\n lspServers: pluginManifest.lspServers ?? marketplaceEntry.lspServers,\n };\n}\n\n/**\n * Convert marketplace entry to plugin manifest (for strict:false plugins)\n */\nfunction marketplaceEntryToManifest(entry: MarketplacePluginEntry): ClaudePluginManifest {\n return {\n name: entry.name,\n version: entry.version,\n description: entry.description,\n author: entry.author,\n homepage: entry.homepage,\n repository: entry.repository,\n license: entry.license,\n keywords: entry.keywords,\n commands: entry.commands,\n agents: entry.agents,\n hooks: entry.hooks,\n mcpServers: entry.mcpServers,\n lspServers: entry.lspServers,\n };\n}\n", "import { relative, basename } from 'path';\nimport { readFile } from 'fs/promises';\nimport { Package, PackageFile, PackageYml } from '../../types/index.js';\nimport { loadPackageConfig } from '../package-context.js';\nimport { extractPackageFromTarball } from '../../utils/tarball.js';\nimport { walkFiles, readTextFile } from '../../utils/fs.js';\nimport { isJunk } from 'junk';\nimport { logger } from '../../utils/logger.js';\nimport { ValidationError } from '../../utils/errors.js';\nimport { FILE_PATTERNS, PACKAGE_PATHS, CLAUDE_PLUGIN_PATHS } from '../../constants/index.js';\nimport { PACKAGE_BOUNDARY_DIRS } from '../../constants/workspace.js';\nimport { detectPluginType, detectPluginWithMarketplace, hasPluginContent } from './plugin-detector.js';\nimport { transformPluginToPackage } from './plugin-transformer.js';\nimport type { MarketplacePluginEntry } from './marketplace-handler.js';\nimport { generateGitHubPackageName } from '../../utils/plugin-naming.js';\nimport * as yaml from 'js-yaml';\n\nexport type PathSourceType = 'directory' | 'tarball';\n\n/**\n * 
Context for loading packages with naming information and marketplace metadata.\n */\nexport interface PackageLoadContext {\n gitUrl?: string;\n path?: string;\n resourcePath?: string;\n repoPath?: string;\n packageName?: string; // Optional override (avoid using - let transformer generate)\n marketplaceEntry?: MarketplacePluginEntry;\n}\n\n/**\n * Infer the source type from a path string.\n */\nexport function inferSourceType(path: string): PathSourceType {\n return path.endsWith(FILE_PATTERNS.TGZ_FILES) || path.endsWith(FILE_PATTERNS.TAR_GZ_FILES) ? 'tarball' : 'directory';\n}\n\n/**\n * Load a package from a local directory.\n * Reads all files from the directory and loads openpackage.yml.\n * \n * If the directory is a Claude Code plugin, transforms it to OpenPackage format.\n * \n * @param dirPath - Path to directory\n * @param context - Optional context for scoped naming (GitHub URL, subdirectory)\n */\nexport async function loadPackageFromDirectory(\n dirPath: string,\n context?: PackageLoadContext\n): Promise<Package> {\n // Check if this is a Claude Code plugin (with marketplace context if available)\n const pluginDetection = await detectPluginWithMarketplace(dirPath, context?.marketplaceEntry);\n \n if (pluginDetection.isPlugin && (pluginDetection.type === 'individual' || pluginDetection.type === 'marketplace-defined')) {\n logger.info(`Detected Claude Code plugin (${pluginDetection.type}), transforming to OpenPackage format`, { dirPath });\n const { package: pkg } = await transformPluginToPackage(dirPath, context);\n return pkg;\n }\n \n // If it's a marketplace, we need to handle plugin selection (done upstream in install command).\n // However, when a resourcePath is set, the user explicitly requested a specific resource within\n // the marketplace \u2014 treat it as a marketplace-defined plugin rather than blocking with an error.\n // This provides defense-in-depth in case upstream content root resolution has an edge case.\n if 
(pluginDetection.isPlugin && pluginDetection.type === 'marketplace') {\n if (context?.resourcePath) {\n logger.info('Marketplace directory loaded with explicit resourcePath, treating as marketplace-defined plugin', {\n dirPath,\n resourcePath: context.resourcePath\n });\n const syntheticEntry: MarketplacePluginEntry = {\n strict: false,\n name: context?.packageName ?? basename(dirPath),\n source: '.'\n };\n const { package: pkg } = await transformPluginToPackage(dirPath, {\n ...context,\n marketplaceEntry: syntheticEntry\n });\n return pkg;\n }\n throw new ValidationError(\n `Directory '${dirPath}' is a Claude Code plugin marketplace. ` +\n `Marketplace installation requires plugin selection and should be handled by the install command.`\n );\n }\n \n // Load openpackage.yml for regular packages\n let config = await loadPackageConfig(dirPath);\n if (!config) {\n // Marketplace-defined plugins (no plugin.json) may have only plugin content (commands/agents/skills).\n // Treat as loadable so install can proceed without marketplace selection.\n const hasContent = await hasPluginContent(dirPath);\n if (hasContent) {\n const syntheticEntry: MarketplacePluginEntry = {\n strict: false,\n name: context?.packageName ?? basename(dirPath),\n source: '.' // minimal spec for marketplace-defined plugin without marketplace manifest\n };\n const marketplaceDefined = await detectPluginWithMarketplace(dirPath, syntheticEntry);\n if (marketplaceDefined.isPlugin && marketplaceDefined.type === 'marketplace-defined') {\n const { package: pkg } = await transformPluginToPackage(dirPath, {\n ...context,\n marketplaceEntry: syntheticEntry\n });\n return pkg;\n }\n }\n // Resource-centric installs (Phase 3) can operate on directories without openpackage.yml,\n // as long as they match installable patterns (base detection ensures this upstream).\n if (!context?.resourcePath) {\n throw new ValidationError(\n `Directory '${dirPath}' is not a valid OpenPackage directory or Claude Code plugin. 
` +\n `Missing ${FILE_PATTERNS.OPENPACKAGE_YML} or ${CLAUDE_PLUGIN_PATHS.PLUGIN_MANIFEST}`\n );\n }\n\n const fallbackBaseName = basename(dirPath);\n const fallbackName = context?.gitUrl\n ? generateGitHubPackageName({\n gitUrl: context.gitUrl,\n path: context.path,\n resourcePath: context.resourcePath,\n packageName: fallbackBaseName,\n repoPath: context.repoPath\n })\n : fallbackBaseName;\n\n config = {\n name: fallbackName,\n version: '0.0.0'\n } satisfies PackageYml;\n }\n\n // Apply GitHub scoping for packages from GitHub sources\n // This ensures consistent naming: gh@username/repo or gh@username/repo/path\n if (context?.gitUrl) {\n const originalName = config.name;\n const scopedName = generateGitHubPackageName({\n gitUrl: context.gitUrl,\n path: context.path,\n resourcePath: context.resourcePath,\n packageName: originalName, // Pass original name for non-GitHub sources\n repoPath: context.repoPath\n });\n \n // Only override if GitHub scoping was applied (name changed)\n if (scopedName !== originalName) {\n config.name = scopedName;\n }\n }\n\n // Discover all files in the directory\n const files: PackageFile[] = [];\n \n try {\n for await (const fullPath of walkFiles(dirPath, [], { excludeDirs: PACKAGE_BOUNDARY_DIRS })) {\n const relativePath = relative(dirPath, fullPath);\n \n // Filter out junk files\n if (isJunk(basename(relativePath))) {\n continue;\n }\n \n const content = await readTextFile(fullPath);\n \n files.push({\n path: relativePath,\n content,\n encoding: 'utf8'\n });\n }\n \n return {\n metadata: config,\n files\n };\n } catch (error) {\n logger.error(`Failed to load package from directory: ${dirPath}`, { error });\n throw new ValidationError(`Failed to load package from directory: ${error}`);\n }\n}\n\n/**\n * Load a package from a tarball file.\n * Extracts to a temporary location, reads files, then cleans up.\n */\nexport async function loadPackageFromTarball(tarballPath: string): Promise<Package> {\n // Read tarball file\n let 
tarballBuffer: Buffer;\n try {\n tarballBuffer = await readFile(tarballPath);\n } catch (error) {\n throw new ValidationError(`Failed to read tarball file '${tarballPath}': ${error}`);\n }\n \n // Extract tarball\n const extracted = await extractPackageFromTarball(tarballBuffer);\n \n // Find openpackage.yml in extracted files\n const packageYmlFile = extracted.files.find(\n f => f.path === PACKAGE_PATHS.MANIFEST_RELATIVE || f.path === 'openpackage.yml'\n );\n \n if (!packageYmlFile) {\n throw new ValidationError(\n `Tarball '${tarballPath}' does not contain a valid ${FILE_PATTERNS.OPENPACKAGE_YML} file`\n );\n }\n \n // Parse openpackage.yml content\n const config = yaml.load(packageYmlFile.content) as PackageYml;\n \n if (!config.name) {\n throw new ValidationError(\n `Tarball '${tarballPath}' contains invalid ${FILE_PATTERNS.OPENPACKAGE_YML}: missing name field`\n );\n }\n \n logger.debug(`Loaded package ${config.name}@${config.version} from tarball: ${tarballPath}`);\n \n return {\n metadata: config,\n files: extracted.files\n };\n}\n\n/**\n * Load a package from either a directory or tarball path.\n * Automatically detects the source type.\n * \n * @param path - Path to package\n * @param context - Optional context for scoped naming\n */\nexport async function loadPackageFromPath(\n path: string,\n context?: PackageLoadContext\n): Promise<Package> {\n const sourceType = inferSourceType(path);\n \n if (sourceType === 'tarball') {\n return await loadPackageFromTarball(path);\n } else {\n return await loadPackageFromDirectory(path, context);\n }\n}\n\n", "import * as tar from 'tar';\nimport { createHash } from 'crypto';\nimport { unlink, readdir, stat, readFile, writeFile } from 'fs/promises';\nimport { join } from 'path';\nimport { tmpdir } from 'os';\nimport { PackageFile, Package } from '../types/index.js';\nimport { logger } from './logger.js';\nimport { ValidationError } from './errors.js';\nimport { writeTextFile, readTextFile, ensureDir, exists } from 
'./fs.js';\nimport { UNVERSIONED, FILE_PATTERNS } from '../constants/index.js';\n\n/**\n * Tarball utilities for package packaging and extraction\n */\n\nexport interface TarballInfo {\n buffer: Buffer;\n size: number;\n checksum: string;\n}\n\nexport interface ExtractedPackage {\n files: PackageFile[];\n checksum: string;\n}\n\n/**\n * Create a tarball from package files\n */\nexport async function createTarballFromPackage(pkg: Package): Promise<TarballInfo> {\n logger.debug(`Creating tarball for package: ${pkg.metadata.name}@${pkg.metadata.version}`);\n \n const tempDir = join(tmpdir(), `openpackage-tarball-${Date.now()}`);\n const tarballPath = join(tempDir, 'package.tar.gz');\n \n try {\n // Create temp directory\n await ensureDir(tempDir);\n \n // Write package files to temp directory\n for (const file of pkg.files) {\n const filePath = join(tempDir, file.path);\n await ensureDir(join(filePath, '..'));\n await writeTextFile(filePath, file.content, (file.encoding as BufferEncoding) || 'utf8');\n }\n \n // Create tarball\n await tar.create(\n {\n gzip: true,\n file: tarballPath,\n cwd: tempDir\n },\n pkg.files.map(f => f.path)\n );\n \n // Read tarball into buffer\n const tarballBuffer = await readFile(tarballPath);\n \n // Calculate checksum\n const checksum = createHash('sha256').update(tarballBuffer).digest('hex');\n \n logger.debug(`Tarball created: ${tarballBuffer.length} bytes, checksum: ${checksum}`);\n \n return {\n buffer: tarballBuffer,\n size: tarballBuffer.length,\n checksum\n };\n } catch (error) {\n logger.error('Failed to create tarball', { error, packageName: pkg.metadata.name });\n throw new ValidationError(`Failed to create tarball: ${error}`);\n } finally {\n // Clean up temp directory\n try {\n if (await exists(tarballPath)) {\n await unlink(tarballPath);\n }\n // Note: We're not removing the temp dir itself as it may have subdirectories\n // The OS temp cleanup will handle this\n } catch (cleanupError) {\n logger.warn('Failed to clean up 
temp files', { cleanupError });\n }\n }\n}\n\n/**\n * Extract package files from tarball buffer\n */\nexport async function extractPackageFromTarball(\n tarballBuffer: Buffer, \n expectedChecksum?: string\n): Promise<ExtractedPackage> {\n logger.debug(`Extracting package from tarball (${tarballBuffer.length} bytes)`);\n \n const tempDir = join(tmpdir(), `openpackage-extract-${Date.now()}`);\n\n const isGzipBuffer = (buffer: Buffer): boolean => {\n // gzip magic header: 1f 8b\n return buffer.length >= 2 && buffer[0] === 0x1f && buffer[1] === 0x8b;\n };\n\n const previewBufferAsText = (buffer: Buffer, maxBytes: number = 200): string => {\n const slice = buffer.subarray(0, Math.min(maxBytes, buffer.length));\n // Replace control characters to keep logs readable\n return slice\n .toString('utf8')\n .replace(/[\\u0000-\\u0008\\u000B\\u000C\\u000E-\\u001F\\u007F]/g, '\uFFFD');\n };\n\n const isGzip = isGzipBuffer(tarballBuffer);\n const tarballPath = join(tempDir, isGzip ? 'package.tar.gz' : 'package.tar');\n \n try {\n // Verify checksum if provided\n const actualChecksum = createHash('sha256').update(tarballBuffer).digest('hex');\n \n if (expectedChecksum && actualChecksum !== expectedChecksum) {\n throw new ValidationError(\n `Tarball checksum mismatch. Expected: ${expectedChecksum}, Got: ${actualChecksum}`\n );\n }\n \n // Create temp directory and write tarball\n await ensureDir(tempDir);\n await writeFile(tarballPath, tarballBuffer);\n \n // Extract tarball\n await tar.extract({\n file: tarballPath,\n cwd: tempDir\n });\n \n // Read extracted files\n const files: PackageFile[] = [];\n const extractFiles = async (dir: string, basePath: string = ''): Promise<void> => {\n const entries = await readdir(dir);\n \n for (const entry of entries) {\n const fullPath = join(dir, entry);\n const relativePath = basePath ? 
join(basePath, entry) : entry;\n \n const stats = await stat(fullPath);\n if (stats.isFile()) {\n const content = await readTextFile(fullPath);\n files.push({\n path: relativePath,\n content,\n encoding: 'utf8'\n });\n } else if (stats.isDirectory()) {\n await extractFiles(fullPath, relativePath);\n }\n }\n };\n \n await extractFiles(tempDir);\n \n // Remove the tarball file itself from the list\n const filteredFiles = files.filter(f => f.path !== 'package.tar.gz' && f.path !== 'package.tar');\n \n logger.debug(`Extracted ${filteredFiles.length} files from tarball`);\n \n return {\n files: filteredFiles,\n checksum: actualChecksum\n };\n } catch (error) {\n const hint = (() => {\n // If we didn't get gzip bytes, the first bytes are often informative (JSON/XML)\n if (isGzip) {\n return undefined;\n }\n const preview = previewBufferAsText(tarballBuffer);\n const trimmed = preview.trimStart();\n if (trimmed.startsWith('{') || trimmed.startsWith('[') || trimmed.startsWith('<')) {\n return `Downloaded payload does not look like a tarball (starts with: '${trimmed.slice(0, 60)}')`;\n }\n return `Downloaded payload does not look gzip-compressed (first bytes: '${preview.slice(0, 60)}')`;\n })();\n\n logger.error('Failed to extract tarball', { error, isGzip, hint });\n const baseMessage = error instanceof Error ? error.message : String(error);\n throw new ValidationError(hint ? `${baseMessage}. 
${hint}` : `Failed to extract tarball: ${baseMessage}`);\n } finally {\n // Clean up temp files\n try {\n if (await exists(tarballPath)) {\n await unlink(tarballPath);\n }\n } catch (cleanupError) {\n logger.warn('Failed to clean up temp files', { cleanupError });\n }\n }\n}\n\n/**\n * Create FormData for multipart upload\n */\nexport function createFormDataForUpload(\n packageName: string,\n version: string | undefined,\n tarballInfo: TarballInfo\n): FormData {\n const formData = new FormData();\n \n // Add form fields\n formData.append('name', packageName);\n if (version) {\n formData.append('version', version);\n }\n \n // Add tarball file\n const blob = new Blob([tarballInfo.buffer], { type: 'application/gzip' });\n const filename = version ? `${packageName}-${version}${FILE_PATTERNS.TGZ_FILES}` : `${packageName}-${UNVERSIONED}${FILE_PATTERNS.TGZ_FILES}`;\n formData.append('file', blob, filename);\n \n return formData;\n}\n\n/**\n * Verify tarball integrity\n */\nexport function verifyTarballIntegrity(\n buffer: Buffer,\n expectedSize?: number,\n expectedChecksum?: string\n): boolean {\n try {\n // Check size\n if (expectedSize && buffer.length !== expectedSize) {\n logger.warn('Tarball size mismatch', { \n expected: expectedSize, \n actual: buffer.length \n });\n return false;\n }\n \n // Check checksum\n if (expectedChecksum) {\n const actualChecksum = createHash('sha256').update(buffer).digest('hex');\n if (actualChecksum !== expectedChecksum) {\n logger.warn('Tarball checksum mismatch', { \n expected: expectedChecksum, \n actual: actualChecksum \n });\n return false;\n }\n }\n \n return true;\n } catch (error) {\n logger.error('Tarball integrity check failed', { error });\n return false;\n }\n}\n\n", "import type { PackageYml, ExecutionContext } from '../../../types/index.js';\nimport type { PackageSource } from '../unified/context.js';\nimport type { InstallOptions } from '../../../types/index.js';\nimport type { EnhancedPackageFormat } from 
'../detection-types.js';\nimport type { ConversionContext } from '../conversion-context.js';\nimport type { UnifiedSpinner } from '../../ports/output.js';\n\n/**\n * Result of loading a package from a source\n */\nexport interface LoadedPackage {\n /** Package metadata */\n metadata: PackageYml;\n \n /** Package name (from package.yml or derived) */\n packageName: string;\n \n /** Package version */\n version: string;\n \n /** Absolute path to package content root */\n contentRoot: string;\n \n /** Source type for tracking */\n source: 'registry' | 'path' | 'git' | 'workspace';\n \n /** Plugin-specific metadata (will be stored in context.source.pluginMetadata) */\n pluginMetadata?: {\n isPlugin: boolean;\n pluginType?: 'individual' | 'marketplace';\n format?: any;\n manifestPath?: string;\n };\n \n /** Additional source metadata */\n sourceMetadata?: {\n /** For git sources: repository path */\n repoPath?: string;\n \n /** For git sources: commit SHA of cached version */\n commitSha?: string;\n \n /** Base detection result (for resource model) */\n baseDetection?: any;\n };\n \n /**\n * Format detection metadata (Phase 4)\n * Set by conversion coordinator after format detection\n */\n formatDetection?: EnhancedPackageFormat;\n \n /**\n * Whether package was pre-converted (Phase 4)\n * True if package was converted from platform format to universal\n */\n preConverted?: boolean;\n \n /**\n * Conversion context (Phase 4)\n * Contains conversion metadata and statistics\n */\n conversionContext?: ConversionContext;\n}\n\n/**\n * Interface for package source loaders\n */\nexport interface PackageSourceLoader {\n /**\n * Check if this loader can handle the given source\n */\n canHandle(source: PackageSource): boolean;\n \n /**\n * Load package from the source\n * \n * @param source - Package source information\n * @param options - Install options\n * @param execContext - Execution context (uses sourceCwd for resolving inputs)\n * @param spinner - Optional spinner from 
the calling phase for progress updates\n */\n load(\n source: PackageSource,\n options: InstallOptions,\n execContext: ExecutionContext,\n spinner?: UnifiedSpinner\n ): Promise<LoadedPackage>;\n}\n\n/**\n * Base error for source loading failures\n */\nexport class SourceLoadError extends Error {\n constructor(\n public source: PackageSource,\n message: string,\n public cause?: Error\n ) {\n super(message);\n this.name = 'SourceLoadError';\n }\n}\n", "import os from 'os';\nimport path from 'path';\n\nimport { DIR_PATTERNS, OPENPACKAGE_DIRS, UNVERSIONED } from '../../constants/index.js';\nimport { listPackageVersions } from '../directory.js';\nimport { exists } from '../../utils/fs.js';\nimport { parsePackageYml } from '../../utils/package-yml.js';\nimport { getLocalPackageDir } from '../../utils/paths.js';\nimport { SCOPED_PACKAGE_REGEX, normalizePackageName } from '../../utils/package-name.js';\nimport semver from 'semver';\nimport type { InstallResolutionMode } from './types.js';\n\nexport type MutableSourceKind = 'workspaceMutable' | 'globalMutable';\n\nexport interface MutableSourceInfo {\n kind: MutableSourceKind;\n packageRootDir: string;\n version: string;\n}\n\nexport interface CandidateVersionsResult {\n localVersions: string[];\n sourceKind?: MutableSourceKind | 'registry';\n contentRoot?: string;\n}\n\nfunction getGlobalMutablePackageDir(packageName: string): string {\n const normalizedName = normalizePackageName(packageName);\n const scopedMatch = normalizedName.match(SCOPED_PACKAGE_REGEX);\n const baseDir = path.join(os.homedir(), DIR_PATTERNS.OPENPACKAGE, OPENPACKAGE_DIRS.PACKAGES);\n\n if (scopedMatch) {\n const [, scope, localName] = scopedMatch;\n return path.join(baseDir, `@${scope}`, localName);\n }\n\n return path.join(baseDir, normalizedName);\n}\n\nasync function loadMutableSourceVersion(packageRootDir: string): Promise<string | null> {\n const manifestPath = path.join(packageRootDir, 'openpackage.yml');\n if (!(await exists(manifestPath))) {\n 
return null;\n }\n\n try {\n const manifest = await parsePackageYml(manifestPath);\n return manifest.version ?? UNVERSIONED;\n } catch {\n return null;\n }\n}\n\nexport async function detectWorkspaceMutableSource(\n cwd: string,\n packageName: string\n): Promise<MutableSourceInfo | null> {\n const packageRootDir = getLocalPackageDir(cwd, packageName);\n const version = await loadMutableSourceVersion(packageRootDir);\n if (!version) {\n return null;\n }\n\n return { kind: 'workspaceMutable', packageRootDir, version };\n}\n\nexport async function detectGlobalMutableSource(\n packageName: string\n): Promise<MutableSourceInfo | null> {\n const packageRootDir = getGlobalMutablePackageDir(packageName);\n const version = await loadMutableSourceVersion(packageRootDir);\n if (!version) {\n return null;\n }\n\n return { kind: 'globalMutable', packageRootDir, version };\n}\n\nexport async function resolveCandidateVersionsForInstall(args: {\n cwd: string;\n packageName: string;\n mode: InstallResolutionMode;\n}): Promise<CandidateVersionsResult> {\n const { cwd, packageName, mode } = args;\n\n if (mode === 'remote-primary') {\n return { localVersions: [] };\n }\n\n const workspaceSource = await detectWorkspaceMutableSource(cwd, packageName);\n if (workspaceSource) {\n return {\n localVersions: [workspaceSource.version],\n sourceKind: workspaceSource.kind,\n contentRoot: workspaceSource.packageRootDir\n };\n }\n\n const globalMutable = await detectGlobalMutableSource(packageName);\n if (globalMutable) {\n return {\n localVersions: [globalMutable.version],\n sourceKind: globalMutable.kind,\n contentRoot: globalMutable.packageRootDir\n };\n }\n\n const registryVersions = await listPackageVersions(packageName);\n return {\n localVersions: registryVersions,\n sourceKind: 'registry'\n };\n}\n\n// Dynamic import to break circular dependency:\n// local-source-resolution \u2192 resolve-named-dependency \u2192 resolve-registry-version \u2192 local-source-resolution\nlet 
_resolveNamedDependency: typeof import('../source-resolution/resolve-named-dependency.js').resolveNamedDependency;\n\nexport async function resolvePackageContentRoot(args: {\n cwd: string;\n packageName: string;\n version: string;\n}): Promise<string> {\n if (!_resolveNamedDependency) {\n _resolveNamedDependency = (await import('../source-resolution/resolve-named-dependency.js')).resolveNamedDependency;\n }\n const resolved = await _resolveNamedDependency(args.packageName, args.cwd, { version: args.version });\n return resolved.absolutePath;\n}\n\nexport async function maybeWarnHigherRegistryVersion(args: {\n packageName: string;\n selectedVersion: string;\n}): Promise<string | undefined> {\n if (!semver.valid(args.selectedVersion)) {\n return undefined;\n }\n\n const registryVersions = await listPackageVersions(args.packageName);\n if (registryVersions.length === 0) {\n return undefined;\n }\n\n const highest = registryVersions[0];\n if (semver.valid(highest) && semver.gt(highest, args.selectedVersion)) {\n return `Newer version available in local registry: ${args.packageName}@${args.selectedVersion} (selected) < ${args.packageName}@${highest} (registry)`;\n }\n\n return undefined;\n}\n", "import path from 'path';\n\nimport { getRegistryDirectories } from '../directory.js';\nimport { normalizePackageName } from '../../utils/package-name.js';\nimport { selectInstallVersionUnified } from '../install/version-selection.js';\nimport { resolveCandidateVersionsForInstall } from '../install/local-source-resolution.js';\nimport { DEFAULT_VERSION_CONSTRAINT, REGISTRY_PATH_PREFIXES } from '../../constants/index.js';\nimport type { InstallResolutionMode } from '../install/types.js';\n\nexport interface ResolveRegistryVersionOptions {\n /**\n * Semver constraint; defaults to '*'.\n */\n constraint?: string;\n /**\n * Resolution strategy (defaults to install's \"default\" mode: local-first with remote fallback).\n */\n mode?: InstallResolutionMode;\n /**\n * CWD for workspace 
package resolution. When provided, workspace and global packages\n * are included in local version candidates (not just registry).\n */\n cwd?: string;\n profile?: string;\n apiKey?: string;\n explicitPrereleaseIntent?: boolean;\n}\n\nimport type { ResolutionSource } from '../../constants/index.js';\n\nexport interface ResolveRegistryVersionResult {\n version: string;\n declaredPath: string;\n absolutePath: string;\n resolutionSource?: ResolutionSource;\n}\n\n/**\n * Resolve a registry version and construct both declared (tilde) and absolute paths.\n */\nexport async function resolveRegistryVersion(\n packageName: string,\n options: ResolveRegistryVersionOptions = {}\n): Promise<ResolveRegistryVersionResult> {\n const normalizedName = normalizePackageName(packageName);\n const mode: InstallResolutionMode = options.mode ?? 'default';\n const constraint = options.constraint ?? DEFAULT_VERSION_CONSTRAINT;\n\n // Include workspace and global packages in local version candidates when cwd is provided\n let localVersions: string[] | undefined;\n if (options.cwd && mode !== 'remote-primary') {\n const candidates = await resolveCandidateVersionsForInstall({\n cwd: options.cwd,\n packageName: normalizedName,\n mode\n });\n localVersions = candidates.localVersions;\n }\n\n const selection = await selectInstallVersionUnified({\n packageName: normalizedName,\n constraint,\n mode,\n localVersions,\n explicitPrereleaseIntent: options.explicitPrereleaseIntent,\n profile: options.profile,\n apiKey: options.apiKey\n });\n\n if (!selection.selectedVersion) {\n throw new Error(\n `Unable to resolve a version for '${normalizedName}' with constraint '${constraint}'.`\n );\n }\n\n const version = selection.selectedVersion;\n const { packages: registryRoot } = getRegistryDirectories();\n\n const absolutePath = path.join(registryRoot, normalizedName, version, path.sep);\n const declaredPath = `${REGISTRY_PATH_PREFIXES.BASE}${normalizedName}/${version}/`;\n\n return {\n version,\n 
declaredPath,\n absolutePath,\n resolutionSource: selection.resolutionSource\n };\n}\n", "import * as semver from 'semver';\nimport { listPackageVersions } from '../directory.js';\nimport {\n fetchRemotePackageMetadata,\n type RemotePullFailure\n} from '../remote-pull.js';\nimport type { PullPackageResponse } from '../../types/api.js';\nimport { describeRemoteFailure } from './remote-reporting.js';\nimport { InstallResolutionMode } from './types.js';\nimport {\n selectVersionWithWipPolicy,\n type VersionSelectionOptions,\n type VersionSelectionResult\n} from '../../utils/version-ranges.js';\nimport { isScopedName } from '../scoping/package-scoping.js';\nimport type { OutputPort } from '../ports/output.js';\nimport { resolveOutput } from '../ports/resolve.js';\nimport { extractRemoteErrorReason } from '../../utils/error-reasons.js';\nimport { UNVERSIONED } from '../../constants/index.js';\nimport { createCacheManager } from '../cache-manager.js';\n\nexport interface VersionSourceSummary {\n localVersions: string[];\n remoteVersions: string[];\n availableVersions: string[];\n remoteStatus: 'skipped' | 'success' | 'failed';\n warnings: string[];\n remoteError?: string;\n fallbackToLocalOnly?: boolean;\n remoteFailure?: RemotePullFailure;\n}\n\nexport interface GatherVersionSourcesArgs {\n packageName: string;\n mode: InstallResolutionMode;\n localVersions?: string[];\n remoteVersions?: string[];\n profile?: string;\n apiKey?: string;\n}\n\nexport interface InstallVersionSelectionArgs extends GatherVersionSourcesArgs {\n constraint: string;\n explicitPrereleaseIntent?: boolean;\n selectionOptions?: VersionSelectionOptions;\n}\n\nexport interface InstallVersionSelectionResult {\n selectedVersion: string | null;\n selection: VersionSelectionResult;\n sources: VersionSourceSummary;\n constraint: string;\n mode: InstallResolutionMode;\n}\n\nexport interface UnifiedInstallVersionSelectionArgs {\n packageName: string;\n constraint: string;\n mode: InstallResolutionMode;\n 
selectionOptions?: VersionSelectionOptions;\n explicitPrereleaseIntent?: boolean;\n profile?: string;\n apiKey?: string;\n localVersions?: string[];\n remoteVersions?: string[];\n filterAvailableVersions?: (versions: string[]) => string[];\n}\n\nexport interface UnifiedInstallVersionSelectionResult extends InstallVersionSelectionResult {\n resolutionSource?: 'local' | 'remote';\n}\n\ninterface RemoteVersionLookupOptions {\n profile?: string;\n apiKey?: string;\n skipCache?: boolean; // For --remote flag, bypass metadata cache\n}\n\ninterface RemoteVersionLookupSuccess {\n success: true;\n versions: string[];\n}\n\ninterface RemoteVersionLookupFailure {\n success: false;\n failure: RemotePullFailure;\n}\n\ntype RemoteVersionLookupResult = RemoteVersionLookupSuccess | RemoteVersionLookupFailure;\n\nexport async function gatherVersionSourcesForInstall(args: GatherVersionSourcesArgs): Promise<VersionSourceSummary> {\n const normalizedLocal = normalizeAndSortVersions(\n args.localVersions ?? 
await listPackageVersions(args.packageName)\n );\n let remoteVersions: string[] = [];\n let remoteStatus: VersionSourceSummary['remoteStatus'] = 'skipped';\n let remoteError: string | undefined;\n let remoteFailure: RemotePullFailure | undefined;\n const warnings: string[] = [];\n\n if (args.mode !== 'local-only') {\n if (args.remoteVersions) {\n remoteVersions = normalizeAndSortVersions(args.remoteVersions);\n remoteStatus = 'success';\n } else {\n const remoteLookup = await fetchRemoteVersions(args.packageName, {\n profile: args.profile,\n apiKey: args.apiKey,\n skipCache: args.mode === 'remote-primary'\n });\n\n if (remoteLookup.success) {\n remoteVersions = normalizeAndSortVersions(remoteLookup.versions);\n remoteStatus = 'success';\n } else {\n remoteStatus = 'failed';\n remoteError = describeRemoteFailure(args.packageName, remoteLookup.failure);\n remoteFailure = remoteLookup.failure;\n }\n }\n }\n\n if (args.mode === 'local-only') {\n return {\n localVersions: normalizedLocal,\n remoteVersions: [],\n availableVersions: normalizedLocal,\n remoteStatus: 'skipped',\n warnings\n };\n }\n\n if (args.mode === 'remote-primary') {\n if (remoteStatus !== 'success') {\n throw new Error(\n remoteError ?? `Remote registry data required to resolve ${args.packageName}`\n );\n }\n\n return {\n localVersions: normalizedLocal,\n remoteVersions,\n availableVersions: remoteVersions,\n remoteStatus,\n warnings,\n remoteFailure\n };\n }\n\n const fallbackToLocalOnly = remoteStatus !== 'success';\n\n if (fallbackToLocalOnly && remoteError && isScopedName(args.packageName)) {\n const reason = extractRemoteErrorReason(remoteError);\n warnings.push(`Remote pull failed for \\`${args.packageName}\\` (reason: ${reason})`);\n }\n\n return {\n localVersions: normalizedLocal,\n remoteVersions,\n availableVersions: fallbackToLocalOnly ? 
normalizedLocal : mergeAndSortVersions(normalizedLocal, remoteVersions),\n remoteStatus,\n warnings,\n remoteError,\n fallbackToLocalOnly,\n remoteFailure\n };\n}\n\nexport async function selectInstallVersionUnified(\n args: UnifiedInstallVersionSelectionArgs\n): Promise<UnifiedInstallVersionSelectionResult> {\n const mergedSelectionOptions: VersionSelectionOptions = {\n ...(args.selectionOptions ?? {}),\n ...(args.explicitPrereleaseIntent ? { explicitPrereleaseIntent: true } : {})\n };\n\n const applyFilter = (versions: string[]): string[] =>\n args.filterAvailableVersions ? args.filterAvailableVersions(versions) : versions;\n\n const attemptWithSources = (sources: VersionSourceSummary, modeContext: InstallResolutionMode) => {\n const filteredVersions = applyFilter(sources.availableVersions);\n\n const selection = selectVersionWithWipPolicy(\n filteredVersions,\n args.constraint,\n mergedSelectionOptions\n );\n\n const selectedVersion = selection.version;\n let resolutionSource: 'local' | 'remote' | undefined;\n if (selectedVersion) {\n const inLocal = sources.localVersions.includes(selectedVersion);\n const inRemote = sources.remoteVersions.includes(selectedVersion);\n if (inLocal && !inRemote) {\n resolutionSource = 'local';\n } else if (!inLocal && inRemote) {\n resolutionSource = 'remote';\n } else if (inLocal && inRemote) {\n resolutionSource = modeContext === 'remote-primary' ? 
'remote' : 'local';\n }\n }\n\n return {\n selectedVersion,\n selection,\n sources,\n resolutionSource\n };\n };\n\n const gatherBase = {\n packageName: args.packageName,\n localVersions: args.localVersions,\n remoteVersions: args.remoteVersions,\n profile: args.profile,\n apiKey: args.apiKey\n };\n\n if (args.mode === 'local-only') {\n const sources = await gatherVersionSourcesForInstall({\n ...gatherBase,\n mode: 'local-only'\n });\n const result = attemptWithSources(sources, 'local-only');\n return {\n ...result,\n constraint: args.constraint,\n mode: args.mode\n };\n }\n\n if (args.mode === 'remote-primary') {\n const sources = await gatherVersionSourcesForInstall({\n ...gatherBase,\n mode: 'remote-primary'\n });\n const result = attemptWithSources(sources, 'remote-primary');\n return {\n ...result,\n constraint: args.constraint,\n mode: args.mode\n };\n }\n\n // Default mode: local-first with remote fallback.\n const localSources = await gatherVersionSourcesForInstall({\n ...gatherBase,\n mode: 'local-only'\n });\n const localAttempt = attemptWithSources(localSources, 'local-only');\n\n if (localAttempt.selectedVersion) {\n return {\n ...localAttempt,\n constraint: args.constraint,\n mode: args.mode\n };\n }\n\n const fallbackSources = await gatherVersionSourcesForInstall({\n ...gatherBase,\n mode: 'default'\n });\n\n if (fallbackSources.remoteStatus === 'failed') {\n const reason =\n fallbackSources.remoteError ??\n `Remote metadata lookup failed while resolving ${args.packageName}`;\n throw new Error(reason);\n }\n\n const fallbackAttempt = attemptWithSources(fallbackSources, args.mode);\n\n return {\n ...fallbackAttempt,\n constraint: args.constraint,\n mode: args.mode\n };\n}\n\nasync function fetchRemoteVersions(\n packageName: string,\n options: RemoteVersionLookupOptions,\n output?: OutputPort\n): Promise<RemoteVersionLookupResult> {\n const out = output ?? 
resolveOutput();\n const cacheManager = createCacheManager();\n \n // Check cached metadata first (skip if --remote flag is set)\n if (!options.skipCache) {\n const cachedMeta = await cacheManager.getCachedMetadata(packageName);\n if (cachedMeta && cachedMeta.versions.length > 0) {\n return { success: true, versions: cachedMeta.versions };\n }\n }\n \n const spinner = out.spinner();\n spinner.start(`Checking remote versions for ${packageName}...`);\n\n try {\n const metadataResult = await fetchRemotePackageMetadata(packageName, undefined, {\n profile: options.profile,\n apiKey: options.apiKey,\n recursive: false\n });\n\n if (!metadataResult.success) {\n return { success: false, failure: metadataResult };\n }\n\n const versions = extractVersionsFromRemoteResponse(metadataResult.response);\n \n // Cache the fetched versions\n if (versions.length > 0) {\n await cacheManager.cacheMetadata(packageName, versions);\n }\n \n return { success: true, versions };\n } finally {\n spinner.stop();\n }\n}\n\nfunction extractVersionsFromRemoteResponse(response: PullPackageResponse): string[] {\n const collected = new Set<string>();\n\n const candidates: Array<unknown> = [];\n const packageAny = response.package as any;\n if (Array.isArray(packageAny?.versions)) {\n candidates.push(...packageAny.versions);\n }\n\n const responseAny = response as any;\n if (Array.isArray(responseAny?.versions)) {\n candidates.push(...responseAny.versions);\n }\n if (Array.isArray(responseAny?.availableVersions)) {\n candidates.push(...responseAny.availableVersions);\n }\n\n for (const candidate of candidates) {\n const normalized = extractVersionString(candidate);\n if (normalized) {\n collected.add(normalized);\n }\n }\n\n if (response.version) {\n const normalizedVersion = response.version.version ?? 
UNVERSIONED;\n collected.add(normalizedVersion);\n }\n\n return Array.from(collected);\n}\n\nfunction extractVersionString(candidate: unknown): string | null {\n if (typeof candidate === 'string') {\n return semver.valid(candidate) ? candidate : null;\n }\n\n if (candidate && typeof candidate === 'object') {\n const value = (candidate as any).version;\n if (value === undefined || value === null) {\n return UNVERSIONED;\n }\n if (typeof value === 'string') {\n if (semver.valid(value)) {\n return value;\n }\n }\n }\n\n return null;\n}\n\nfunction normalizeAndSortVersions(versions: string[]): string[] {\n const normalized = new Set<string>();\n for (const version of versions) {\n if (typeof version !== 'string') {\n continue;\n }\n const trimmed = version.trim();\n if (!trimmed) {\n continue;\n }\n if (!semver.valid(trimmed)) {\n continue;\n }\n normalized.add(trimmed);\n }\n const sorted = Array.from(normalized).sort(semver.rcompare);\n return sorted;\n}\n\nfunction mergeAndSortVersions(left: string[], right: string[]): string[] {\n const merged = new Set<string>();\n\n for (const version of [...left, ...right]) {\n if (semver.valid(version)) {\n merged.add(version);\n }\n }\n\n const sorted = Array.from(merged).sort(semver.rcompare);\n return sorted;\n}\n\n", "import * as yaml from 'js-yaml';\nimport { PullPackageDownload, PullPackageResponse } from '../types/api.js';\nimport { Package, PackageYml } from '../types/index.js';\nimport { packageManager } from './package.js';\nimport type { PackageVersionState } from './package.js';\nimport { ensureRegistryDirectories } from './directory.js';\nimport { authManager } from './auth.js';\nimport { createHttpClient, HttpClient } from './http-client.js';\nimport { extractPackageFromTarball, verifyTarballIntegrity, ExtractedPackage } from '../utils/tarball.js';\nimport { logger } from '../utils/logger.js';\nimport { ConfigError, ValidationError } from '../utils/errors.js';\nimport { PACKAGE_PATHS } from 
'../constants/index.js';\nimport { formatVersionLabel } from './package-versioning.js';\nimport { normalizeRegistryPath } from './platform/registry-entry-filter.js';\nimport { mergePackageFiles } from '../utils/package-merge.js';\nimport { createCacheManager } from './cache-manager.js';\nimport type { OutputPort, UnifiedSpinner } from './ports/output.js';\nimport { resolveOutput } from './ports/resolve.js';\n\nconst NETWORK_ERROR_PATTERN = /(fetch failed|ENOTFOUND|EAI_AGAIN|ECONNREFUSED|ECONNRESET|ETIMEDOUT|EHOSTUNREACH|ENETUNREACH|network)/i;\n\nfunction matchesNetworkPattern(value: unknown): boolean {\n return typeof value === 'string' && NETWORK_ERROR_PATTERN.test(value);\n}\n\nfunction isNetworkFailure(error: Error): boolean {\n if (matchesNetworkPattern(error.message)) {\n return true;\n }\n\n const cause = (error as any).cause;\n if (cause && (matchesNetworkPattern(cause.message) || matchesNetworkPattern(cause.code) || matchesNetworkPattern(cause.errno))) {\n return true;\n }\n\n if (matchesNetworkPattern((error as any).code) || matchesNetworkPattern((error as any).errno)) {\n return true;\n }\n\n return false;\n}\n\nexport interface RemotePullContext {\n httpClient: HttpClient;\n profile: string;\n registryUrl: string;\n}\n\nexport interface RemotePullOptions {\n profile?: string;\n apiKey?: string;\n quiet?: boolean;\n preFetchedResponse?: PullPackageResponse;\n httpClient?: HttpClient;\n recursive?: boolean;\n paths?: string[];\n skipLocalCheck?: boolean; // For --remote flag, bypass local registry check\n output?: OutputPort; // Optional output port for spinner feedback\n spinner?: UnifiedSpinner; // Optional pre-existing spinner to reuse (avoids nested spinners)\n}\n\nexport interface RemoteBatchPullOptions extends RemotePullOptions {\n dryRun?: boolean;\n filter?: (name: string, version: string, download: PullPackageDownload) => boolean;\n skipIfFull?: boolean;\n}\n\nexport type RemotePullFailureReason =\n | 'not-found'\n | 'access-denied'\n | 
'network'\n | 'integrity'\n | 'unknown';\n\nexport interface RemotePullFailure {\n success: false;\n reason: RemotePullFailureReason;\n message: string;\n statusCode?: number;\n error?: unknown;\n}\n\nexport interface RemotePullSuccess {\n success: true;\n name: string;\n version: string;\n response: PullPackageResponse;\n extracted: ExtractedPackage;\n registryUrl: string;\n profile: string;\n downloadUrl: string;\n tarballSize: number | undefined;\n}\n\nexport type RemotePullResult = RemotePullSuccess | RemotePullFailure;\n\nexport interface RemotePackageMetadataSuccess {\n success: true;\n context: RemotePullContext;\n response: PullPackageResponse;\n}\n\nexport type RemotePackageMetadataResult = RemotePackageMetadataSuccess | RemotePullFailure;\n\nexport interface BatchDownloadItemResult {\n name: string;\n version: string;\n downloadUrl?: string;\n success: boolean;\n error?: string;\n}\n\nexport interface RemoteBatchPullResult {\n success: boolean;\n pulled: BatchDownloadItemResult[];\n failed: BatchDownloadItemResult[];\n warnings?: string[];\n}\n\nfunction normalizeDownloadPaths(paths?: string[]): string[] {\n if (!paths || paths.length === 0) {\n return [];\n }\n\n const normalized = paths\n .filter(path => typeof path === 'string')\n .map(path => path.startsWith('/') ? path.slice(1) : path)\n .map(path => normalizeRegistryPath(path))\n .filter(path => path.length > 0);\n\n return Array.from(new Set(normalized));\n}\n\nexport function buildPullEndpoint(\n name: string,\n version?: string,\n options?: { recursive?: boolean; paths?: string[] }\n): string {\n const encodedName = encodeURIComponent(name);\n const hasVersion = version && version !== 'latest';\n const endpoint = hasVersion\n ? 
`/packages/pull/by-name/${encodedName}/v/${encodeURIComponent(version as string)}`\n : `/packages/pull/by-name/${encodedName}`;\n\n const params: string[] = [];\n if (options?.recursive) {\n params.push('recursive=true');\n }\n\n const normalizedPaths = normalizeDownloadPaths(options?.paths);\n if (normalizedPaths.length > 0) {\n const encodedPaths = normalizedPaths.map(path => encodeURIComponent(path)).join(',');\n params.push(`paths=${encodedPaths}`);\n params.push('includeManifest=true');\n }\n\n if (params.length === 0) {\n return endpoint;\n }\n\n const delimiter = endpoint.includes('?') ? '&' : '?';\n return `${endpoint}${delimiter}${params.join('&')}`;\n}\n\n/**\n * Parse a download identifier that may contain registry path segments.\n *\n * Supports forms like:\n * - foo@1.2.3\n * - foo/bar@1.2.3\n * - @scope/foo/bar@1.2.3\n * - foo@1.2.3/path/to/file\n * - @scope/foo@1.2.3/path/to/file\n *\n+ * The registry path (if present) is returned separately so callers can\n * preserve file-level intent once the backend supports file-scoped downloads.\n */\nexport function parseDownloadIdentifier(\n downloadName: string\n): { packageName: string; version: string; registryPath?: string } {\n // Special handling for gh@ prefix - find version @ that's not at position 2\n let atIndex = -1;\n if (downloadName.startsWith('gh@')) {\n // Find the last @ that's not the one in gh@\n for (let i = downloadName.length - 1; i >= 0; i--) {\n if (downloadName[i] === '@' && i !== 2) {\n atIndex = i;\n break;\n }\n }\n } else {\n atIndex = downloadName.lastIndexOf('@');\n }\n\n if (atIndex <= 0 || atIndex === downloadName.length - 1) {\n throw new Error(`Invalid download name '${downloadName}'. 
Expected format '<package>@<version>'.`);\n }\n\n const rawName = downloadName.slice(0, atIndex);\n const rawVersion = downloadName.slice(atIndex + 1);\n\n // Parse package name and optional path from the name portion\n let packageName: string;\n let namePath: string | undefined;\n if (rawName.startsWith('gh@')) {\n // GitHub format: gh@username/repo or gh@username/repo/path\n const segments = rawName.split('/');\n if (segments.length < 2) {\n throw new Error(`Invalid GitHub package in download name '${downloadName}'.`);\n }\n // Take first two segments: gh@username/repo\n packageName = segments.slice(0, 2).join('/');\n // Everything after repo is the path\n if (segments.length > 2) {\n namePath = segments.slice(2).join('/');\n }\n } else if (rawName.startsWith('@')) {\n // Scoped format: @scope/pkg\n const segments = rawName.split('/');\n if (segments.length < 2) {\n throw new Error(`Invalid scoped package in download name '${downloadName}'.`);\n }\n packageName = segments.slice(0, 2).join('/'); // @scope/pkg\n namePath = segments.length > 2 ? segments.slice(2).join('/') : undefined;\n } else {\n // Unscoped format\n const segments = rawName.split('/');\n packageName = segments[0];\n namePath = segments.length > 1 ? segments.slice(1).join('/') : undefined;\n }\n\n // Parse version and optional path from the version portion\n const versionSegments = rawVersion.split('/');\n const version = versionSegments[0];\n const versionPath = versionSegments.length > 1 ? versionSegments.slice(1).join('/') : undefined;\n\n if (!packageName || !version) {\n throw new Error(`Invalid download name '${downloadName}'. Expected format '<package>@<version>'.`);\n }\n\n const registryPathParts = [namePath, versionPath].filter(Boolean) as string[];\n const registryPath = registryPathParts.length > 0 ? 
registryPathParts.join('/') : undefined;\n\n return { packageName, version, registryPath };\n}\n\n/**\n * Backward-compatible wrapper returning only name/version.\n */\nexport function aggregateRecursiveDownloads(responses: PullPackageResponse[]): PullPackageDownload[] {\n const aggregated = new Map<string, PullPackageDownload>();\n\n for (const response of responses) {\n if (!Array.isArray(response.downloads)) {\n continue;\n }\n\n for (const download of response.downloads) {\n if (!download?.name) {\n continue;\n }\n\n const existing = aggregated.get(download.name);\n\n if (!existing) {\n aggregated.set(download.name, download);\n continue;\n }\n\n if (!existing.downloadUrl && download.downloadUrl) {\n aggregated.set(download.name, download);\n }\n }\n }\n\n return Array.from(aggregated.values());\n}\n\nexport function isPartialDownload(download?: PullPackageDownload): boolean {\n return false;\n}\n\nexport async function pullDownloadsBatchFromRemote(\n responses: PullPackageResponse | PullPackageResponse[],\n options: RemoteBatchPullOptions = {}\n): Promise<RemoteBatchPullResult> {\n const responseArray = Array.isArray(responses) ? responses : [responses];\n\n if (responseArray.length === 0) {\n return { success: true, pulled: [], failed: [] };\n }\n\n await ensureRegistryDirectories();\n\n const context = await createContext(options);\n const httpClient = context.httpClient;\n\n const downloads = aggregateRecursiveDownloads(responseArray);\n const pulled: BatchDownloadItemResult[] = [];\n const failed: BatchDownloadItemResult[] = [];\n const warnings: string[] = [];\n const stateCache = new Map<string, PackageVersionState>();\n \n const out = options.quiet ? undefined : options.output;\n const externalSpinner = options.spinner;\n const ownedSpinner = !externalSpinner ? out?.spinner() : undefined;\n const spinner = externalSpinner ?? 
ownedSpinner;\n const totalCount = downloads.length;\n let completedCount = 0;\n \n if (externalSpinner) {\n spinner?.message(`Downloading ${totalCount} dependencies`);\n } else {\n ownedSpinner?.start(`Downloading ${totalCount} dependencies`);\n }\n\n const getLocalState = async (name: string, version: string): Promise<PackageVersionState> => {\n const key = `${name}@${formatVersionLabel(version)}`;\n const cached = stateCache.get(key);\n if (cached) {\n return cached;\n }\n const state = await packageManager.getPackageVersionState(name, version);\n stateCache.set(key, state);\n return state;\n };\n\n const tasks = downloads.map(async (download) => {\n const identifier = download.name;\n\n let parsedName: { packageName: string; version: string; registryPath?: string };\n\n try {\n parsedName = parseDownloadIdentifier(identifier);\n } catch (error) {\n const message = error instanceof Error ? error.message : String(error);\n logger.warn(`Skipping download '${identifier}': ${message}`);\n failed.push({ name: identifier, version: '', downloadUrl: download.downloadUrl, success: false, error: message });\n completedCount++;\n spinner?.message(`Downloading dependencies (${completedCount}/${totalCount})`);\n return;\n }\n\n const { packageName: name, version } = parsedName;\n const isPartial = isPartialDownload(download);\n\n try {\n if (options.filter && !options.filter(name, version, download)) {\n completedCount++;\n spinner?.message(`Downloading dependencies (${completedCount}/${totalCount})`);\n return;\n }\n\n if (!download.downloadUrl) {\n const warning = `Download URL missing for ${identifier}`;\n logger.warn(warning);\n warnings.push(warning);\n failed.push({ name, version, downloadUrl: download.downloadUrl, success: false, error: 'download-url-missing' });\n completedCount++;\n spinner?.message(`Downloading dependencies (${completedCount}/${totalCount})`);\n return;\n }\n\n if (isPartial && options.skipIfFull !== false) {\n const localState = await 
getLocalState(name, version);\n if (localState.exists && !localState.isPartial) {\n const skipMessage = `${name}@${version} already exists locally (full); skipping partial download`;\n logger.info(skipMessage);\n warnings.push(skipMessage);\n pulled.push({ name, version, downloadUrl: download.downloadUrl, success: true });\n completedCount++;\n spinner?.message(`Downloading dependencies (${completedCount}/${totalCount})`);\n return;\n }\n }\n\n if (options.dryRun) {\n pulled.push({ name, version, downloadUrl: download.downloadUrl, success: true });\n completedCount++;\n spinner?.message(`Downloading dependencies (${completedCount}/${totalCount})`);\n return;\n }\n\n const tarballBuffer = await downloadPackageTarball(httpClient, download.downloadUrl);\n const extracted = await extractPackageFromTarball(tarballBuffer);\n const metadata = buildPackageMetadata(extracted, name, version);\n\n await packageManager.savePackage(\n { metadata, files: extracted.files },\n { partial: isPartial }\n );\n\n pulled.push({ name, version, downloadUrl: download.downloadUrl, success: true });\n } catch (error) {\n logger.debug('Batch download failed', { identifier, error });\n failed.push({\n name,\n version,\n downloadUrl: download.downloadUrl,\n success: false,\n error: error instanceof Error ? error.message : String(error)\n });\n }\n completedCount++;\n spinner?.message(`Downloading dependencies (${completedCount}/${totalCount})`);\n });\n\n await Promise.all(tasks);\n\n if (externalSpinner) {\n spinner?.message(`Downloaded ${pulled.length} dependencies${failed.length > 0 ? ` (${failed.length} failed)` : ''}`);\n } else {\n ownedSpinner?.stop(`Downloaded ${pulled.length} dependencies${failed.length > 0 ? ` (${failed.length} failed)` : ''}`);\n }\n\n return {\n success: failed.length === 0,\n pulled,\n failed,\n warnings: warnings.length > 0 ? 
warnings : undefined\n };\n}\n\nfunction buildPackageMetadata(\n extracted: ExtractedPackage,\n fallbackName: string,\n fallbackVersion: string\n): PackageYml {\n const packageFile = extracted.files.find(\n file => file.path === PACKAGE_PATHS.MANIFEST_RELATIVE\n );\n\n if (packageFile) {\n try {\n const parsed = yaml.load(packageFile.content) as PackageYml | undefined;\n\n if (parsed && typeof parsed === 'object' && parsed.name && parsed.version) {\n return parsed;\n }\n\n logger.debug('Parsed openpackage.yml missing required fields, falling back to inferred metadata', {\n fallbackName,\n fallbackVersion\n });\n } catch (error) {\n logger.debug('Failed to parse openpackage.yml from extracted tarball', {\n fallbackName,\n fallbackVersion,\n error\n });\n }\n }\n\n return {\n name: fallbackName,\n version: fallbackVersion,\n } as PackageYml;\n}\n\nexport async function fetchRemotePackageMetadata(\n name: string,\n version: string | undefined,\n options: RemotePullOptions = {}\n): Promise<RemotePackageMetadataResult> {\n try {\n await ensureRegistryDirectories();\n\n const context = await createContext(options);\n const response = await getRemotePackage(\n context.httpClient,\n name,\n version,\n options.recursive,\n options.paths\n );\n\n return {\n success: true,\n context,\n response\n };\n } catch (error) {\n return mapErrorToFailure(error);\n }\n}\n\nexport async function pullPackageFromRemote(\n name: string,\n version?: string,\n options: RemotePullOptions = {}\n): Promise<RemotePullResult> {\n // When an external spinner is provided (from loadPackagePhase), reuse it \u2014\n // only update its message text; the caller owns start/stop lifecycle.\n // Otherwise fall back to creating a new spinner from the output port.\n const externalSpinner = options.spinner;\n const out = options.quiet ? undefined : options.output;\n const ownedSpinner = !externalSpinner ? out?.spinner() : undefined;\n const spinner = externalSpinner ?? 
ownedSpinner;\n \n try {\n // CACHE CHECK: If specific version requested, check if already in local registry\n // Skip this check if skipLocalCheck is set (--remote flag forces fresh fetch)\n if (version && version !== 'latest' && !options.skipLocalCheck) {\n const cacheManager = createCacheManager();\n const localPath = await cacheManager.getLocalPackagePath(name, version);\n if (localPath) {\n logger.debug('Package already exists in local registry, skipping remote pull', { name, version, localPath });\n return {\n success: true,\n name,\n version,\n response: {} as PullPackageResponse,\n extracted: { files: [], checksum: '' },\n registryUrl: '',\n profile: '',\n downloadUrl: '',\n tarballSize: undefined\n };\n }\n }\n \n const versionLabel = version ? `@${version}` : '';\n if (externalSpinner) {\n spinner?.message(`Fetching ${name}${versionLabel} from registry`);\n } else {\n ownedSpinner?.start(`Fetching ${name}${versionLabel} from registry`);\n }\n \n const metadataResult = options.preFetchedResponse\n ? await createResultFromPrefetched(options)\n : await fetchRemotePackageMetadata(name, version, options);\n\n if (!metadataResult.success) {\n ownedSpinner?.stop();\n return metadataResult;\n }\n\n const { context, response } = metadataResult;\n const primaryDownload = resolvePrimaryDownload(response);\n if (!primaryDownload?.downloadUrl) {\n ownedSpinner?.stop();\n return {\n success: false,\n reason: 'access-denied',\n message: 'Package download not available for this account',\n };\n }\n\n const resolvedVersion = formatVersionLabel(response.version.version);\n spinner?.message(`Downloading ${name}@${resolvedVersion}`);\n \n const isPartial = isPartialDownload(primaryDownload);\n const tarballBuffer = await downloadPackageTarball(context.httpClient, primaryDownload.downloadUrl);\n\n const expectedSize = isPartial ? 
undefined : response.version.tarballSize;\n if (!verifyTarballIntegrity(tarballBuffer, expectedSize)) {\n ownedSpinner?.stop();\n return {\n success: false,\n reason: 'integrity',\n message: 'Tarball integrity verification failed'\n };\n }\n\n spinner?.message(`Extracting ${name}@${resolvedVersion}`);\n const extracted = await extractPackageFromTarball(tarballBuffer);\n\n await savePackageToLocalRegistry(response, extracted, {\n partial: isPartial\n });\n\n if (externalSpinner) {\n spinner?.message(`Fetched ${name}@${resolvedVersion}`);\n } else {\n ownedSpinner?.stop(`Fetched ${name}@${resolvedVersion}`);\n }\n\n return {\n success: true,\n name: response.package.name,\n version: resolvedVersion,\n response,\n extracted,\n registryUrl: context.registryUrl,\n profile: context.profile,\n downloadUrl: primaryDownload.downloadUrl,\n tarballSize: response.version.tarballSize\n };\n } catch (error) {\n ownedSpinner?.stop();\n return mapErrorToFailure(error);\n }\n}\n\nfunction resolvePrimaryDownload(response: PullPackageResponse): PullPackageDownload | undefined {\n if (!Array.isArray(response.downloads) || response.downloads.length === 0) {\n return undefined;\n }\n\n const primaryMatch = response.downloads.find(download => download.name === response.package.name && download.downloadUrl);\n if (primaryMatch?.downloadUrl) {\n return primaryMatch;\n }\n\n const fallbackMatch = response.downloads.find(download => download.downloadUrl);\n return fallbackMatch;\n}\n\nasync function createResultFromPrefetched(options: RemotePullOptions): Promise<RemotePackageMetadataResult> {\n if (!options.preFetchedResponse) {\n throw new Error('preFetchedResponse missing from options');\n }\n\n const context = await createContext(options);\n\n return {\n success: true,\n context,\n response: options.preFetchedResponse\n };\n}\n\nasync function createContext(options: RemotePullOptions): Promise<RemotePullContext> {\n const authOptions = {\n profile: options.profile,\n apiKey: 
options.apiKey\n };\n\n const httpClient = options.httpClient || await createHttpClient(authOptions);\n const profile = authManager.getCurrentProfile(authOptions);\n const registryUrl = authManager.getRegistryUrl();\n\n return {\n httpClient,\n profile,\n registryUrl\n };\n}\n\nasync function getRemotePackage(\n httpClient: HttpClient,\n name: string,\n version?: string,\n recursive?: boolean,\n paths?: string[],\n): Promise<PullPackageResponse> {\n const finalEndpoint = buildPullEndpoint(name, version, { recursive, paths });\n logger.debug(`Fetching remote package metadata`, {\n name,\n version: version ?? 'latest',\n endpoint: finalEndpoint,\n recursive: !!recursive,\n hasPaths: !!paths && paths.length > 0\n });\n return await httpClient.get<PullPackageResponse>(finalEndpoint);\n}\n\nasync function downloadPackageTarball(httpClient: HttpClient, downloadUrl: string): Promise<Buffer> {\n const downloadHost = (() => {\n try {\n return new URL(downloadUrl).host;\n } catch {\n return '';\n }\n })();\n const registryHost = (() => {\n try {\n return new URL(authManager.getRegistryUrl()).host;\n } catch {\n return '';\n }\n })();\n const shouldSkipAuth = downloadHost !== '' && registryHost !== '' && downloadHost !== registryHost;\n const buffer = await httpClient.downloadFile(downloadUrl, { skipAuth: shouldSkipAuth });\n return Buffer.from(buffer);\n}\n\nasync function savePackageToLocalRegistry(\n response: PullPackageResponse,\n extracted: ExtractedPackage,\n saveOptions: { partial?: boolean } = {}\n): Promise<void> {\n const metadata: PackageYml & Record<string, unknown> = {\n name: response.package.name,\n version: response.version.version,\n description: response.package.description,\n keywords: response.package.keywords,\n private: response.package.isPrivate\n };\n\n (metadata as any).files = extracted.files.map(file => file.path);\n (metadata as any).created = response.version.createdAt;\n (metadata as any).updated = response.version.updatedAt;\n\n let files = 
extracted.files;\n\n if (saveOptions.partial) {\n try {\n const existing = await packageManager.loadPackage(response.package.name, response.version.version);\n files = mergePackageFiles(existing.files, files);\n } catch {\n // No existing version; keep files as-is\n }\n }\n\n await packageManager.savePackage(\n { metadata: metadata as PackageYml, files },\n { partial: Boolean(saveOptions.partial) }\n );\n}\n\nfunction mapErrorToFailure(error: unknown): RemotePullFailure {\n logger.debug('Remote pull operation failed', { error });\n\n if (error instanceof ValidationError) {\n return {\n success: false,\n reason: 'integrity',\n message: error.message,\n error\n };\n }\n\n if (error instanceof ConfigError) {\n return {\n success: false,\n reason: 'access-denied',\n message: error.message,\n error\n };\n }\n\n if (error instanceof Error) {\n const apiError = (error as any).apiError;\n\n if (apiError?.statusCode === 404) {\n const failure: RemotePullFailure = {\n success: false,\n reason: 'not-found',\n message: error.message,\n statusCode: 404,\n error\n };\n return failure;\n }\n\n if (apiError?.statusCode === 401 || apiError?.statusCode === 403) {\n return {\n success: false,\n reason: 'access-denied',\n message: error.message,\n statusCode: apiError.statusCode,\n error\n };\n }\n\n if (isNetworkFailure(error)) {\n return {\n success: false,\n reason: 'network',\n message: error.message,\n error\n };\n }\n\n return {\n success: false,\n reason: 'unknown',\n message: error.message,\n error\n };\n }\n\n return {\n success: false,\n reason: 'unknown',\n message: 'Unknown error occurred',\n error\n };\n}\n\n\n", "import { FILE_PATTERNS, PACKAGE_PATHS } from '../constants/index.js';\nimport { normalizePathForProcessing } from './path-normalization.js';\n\n/**\n * Normalize package-internal paths for consistent comparisons.\n * Strips leading slash and converts to forward slashes.\n */\nexport function normalizePackagePath(path: string): string {\n const trimmed = 
path.startsWith('/') ? path.slice(1) : path;\n return normalizePathForProcessing(trimmed);\n}\n\n/**\n * Determine if a path points to a package manifest (either bare or canonical).\n */\nexport function isManifestPath(path: string): boolean {\n const normalized = normalizePackagePath(path);\n return (\n normalized === FILE_PATTERNS.OPENPACKAGE_YML ||\n normalized === PACKAGE_PATHS.MANIFEST_RELATIVE\n );\n}\n\n", "/**\n * Platform Utilities Module\n * Utility functions for platform management, detection, and file operations\n */\n\nimport { join } from 'path';\nimport { getPathLeaf } from '../../utils/path-normalization.js';\nimport { FILE_PATTERNS } from '../../constants/index.js';\nimport type { Platform, PlatformDetectionResult } from '../../types/platform.js';\nimport {\n getAllPlatforms,\n getPlatformRootFiles as getPlatformRootFileNames,\n getPlatformDefinition,\n getPlatformDirLookup,\n detectAllPlatforms\n} from '../platforms.js';\n\n/* Removed unused detectPlatformsWithDetails - use detectAllPlatforms directly for details */\n\n/**\n * Extract platform name from source directory path\n * Uses platform definitions for scalable platform detection\n * @param cwd - Optional cwd for local platform config overrides\n */\nexport function getPlatformNameFromSource(sourceDir: string, cwd?: string): string {\n // Quick lookup via dir map first\n const dirLookup = getPlatformDirLookup(cwd);\n const fromDir = dirLookup[sourceDir];\n if (fromDir) return fromDir;\n\n // Full scan for flow-based matches\n for (const platform of getAllPlatforms({ includeDisabled: true }, cwd)) {\n const definition = getPlatformDefinition(platform, cwd);\n \n // Check if sourceDir matches any export flow 'to' pattern directory (package \u2192 workspace)\n if (definition.export && definition.export.length > 0) {\n for (const flow of definition.export) {\n const toPattern = typeof flow.to === 'string' ? 
flow.to : Object.keys(flow.to)[0];\n if (toPattern) {\n // Extract directory from 'to' pattern\n const parts = toPattern.split('/');\n const subdirPath = parts.slice(0, -1).join('/');\n \n if (sourceDir.includes(subdirPath)) {\n return platform;\n }\n }\n }\n }\n }\n\n // Fallback: extract from path\n return getPathLeaf(sourceDir) || 'unknown';\n}\n\n/**\n * Get all platform directory names\n * Returns an array of all supported platform directory names\n * @param cwd - Optional cwd for local overrides\n */\nexport function getAllPlatformDirs(cwd?: string): string[] {\n return Object.keys(getPlatformDirLookup(cwd));\n}\n\n/**\n * Get all platform root files as a Set, including AGENTS.md universal file.\n * @param cwd - Optional cwd for local overrides\n */\nexport function getPlatformRootFiles(cwd?: string): Set<string> {\n const rootFiles = new Set(getPlatformRootFileNames(cwd)); // from platforms.ts, excludes AGENTS.md\n rootFiles.add(FILE_PATTERNS.AGENTS_MD);\n rootFiles.add(FILE_PATTERNS.CLAUDE_MD);\n rootFiles.add(FILE_PATTERNS.GEMINI_MD);\n rootFiles.add(FILE_PATTERNS.QWEN_MD);\n rootFiles.add(FILE_PATTERNS.WARP_MD);\n return rootFiles;\n}\n\nexport function isPlatformRootFile(fileName: string, cwd?: string): boolean {\n return getPlatformRootFiles(cwd).has(fileName);\n}\n\n\n\n", "import {\n FILE_PATTERNS\n} from '../../constants/index.js';\nimport { normalizePathForProcessing } from '../../utils/path-normalization.js';\nimport { \n matchesUniversalPattern,\n isPlatformId \n} from '../platforms.js';\nimport { isManifestPath } from '../../utils/manifest-paths.js';\nimport { isPlatformRootFile } from './platform-utils.js';\n\nexport function normalizeRegistryPath(registryPath: string): string {\n return normalizePathForProcessing(registryPath);\n}\n\nexport function isRootRegistryPath(registryPath: string): boolean {\n const normalized = normalizeRegistryPath(registryPath);\n const fileName = normalized.split('/').pop();\n return !!fileName && 
isPlatformRootFile(fileName);\n}\n\nexport function isSkippableRegistryPath(registryPath: string, cwd?: string): boolean {\n const normalized = normalizeRegistryPath(registryPath);\n \n // Handle openpackage.yml at any level\n if (isManifestPath(normalized)) {\n return true;\n }\n\n // Check if it's a platform-specific YML file (e.g., rules.cursor.yml)\n if (!normalized.endsWith(FILE_PATTERNS.YML_FILE)) {\n return false;\n }\n\n const fileName = normalized.split('/').pop();\n if (!fileName) {\n return false;\n }\n\n const parts = fileName.split('.');\n if (parts.length < 3) {\n return false;\n }\n\n const possiblePlatform = parts[parts.length - 2];\n return isPlatformId(possiblePlatform);\n}\n\n/**\n * Check if a registry path is allowed to be included in a package.\n * Uses flow-based pattern matching to determine if a file is universal content.\n * \n * @param registryPath - Path to validate\n * @param cwd - Optional cwd for local platform config overrides\n * @returns true if path should be included in package\n */\nexport function isAllowedRegistryPath(registryPath: string, cwd?: string): boolean {\n const normalized = normalizeRegistryPath(registryPath);\n\n // Root-level platform files (AGENTS.md, CLAUDE.md, etc.) 
are allowed as package content.\n // They are mapped by platform flows during install/apply.\n if (isRootRegistryPath(normalized)) return true;\n \n // Exclude platform-specific YML files\n if (isSkippableRegistryPath(normalized, cwd)) return false;\n\n // Flow-based validation: path must match at least one universal pattern\n return matchesUniversalPattern(normalized, cwd);\n}\n\n/**\n * Extract universal subdirectory info from a registry path if it starts with a known subdir.\n * Returns null for root-level files that match universal patterns.\n * \n * @param registryPath - Registry path to analyze\n * @param cwd - Optional cwd for local platform config overrides\n * @returns Subdirectory info or null\n * \n * @deprecated This function exists for backward compatibility with code that needs\n * to extract subdirectory information for path mapping. New code should use\n * matchesUniversalPattern() for validation.\n */\nexport function extractUniversalSubdirInfo(\n registryPath: string,\n cwd?: string\n): { universalSubdir: string; relPath: string } | null {\n const normalized = normalizeRegistryPath(registryPath);\n\n // Must match a universal pattern\n if (!matchesUniversalPattern(normalized, cwd)) {\n return null;\n }\n\n // Extract first path component\n const parts = normalized.split('/');\n const firstComponent = parts[0];\n \n // If first component contains a dot, it's a root-level file, not a subdir\n if (!firstComponent || firstComponent.includes('.')) {\n return null;\n }\n\n // First component is a directory\n const relPath = parts.slice(1).join('/');\n return {\n universalSubdir: firstComponent,\n relPath\n };\n}\n\n\n", "import * as yaml from 'js-yaml';\nimport { PackageFile, PackageYml } from '../types/index.js';\nimport { PACKAGE_PATHS } from '../constants/index.js';\nimport { normalizePathForProcessing } from './path-normalization.js';\nimport { serializePackageYml } from './package-yml.js';\n\nconst PACKAGE_INDEX = 
normalizePathForProcessing(PACKAGE_PATHS.INDEX_RELATIVE);\n\n/**\n * Merge two sets of package files:\n * - normalizes paths\n * - skips package index\n * - incoming wins on conflicts\n */\nexport function mergePackageFiles(base: PackageFile[], incoming: PackageFile[]): PackageFile[] {\n const byPath = new Map<string, PackageFile>();\n\n const addAll = (list: PackageFile[]) => {\n for (const file of list) {\n const normalized = normalizePathForProcessing(file.path) || file.path;\n if (normalized === PACKAGE_INDEX) continue; // never merge index\n byPath.set(normalized, { ...file, path: normalized });\n }\n };\n\n addAll(base);\n addAll(incoming);\n\n return Array.from(byPath.values());\n}\n\n/**\n * Remove partial: true from manifest content if present, re-serializing with existing helper.\n */\nexport function stripPartialFlag(files: PackageFile[], fallbackName?: string): PackageFile[] {\n return files.map(file => {\n const normalized = normalizePathForProcessing(file.path) || file.path;\n if (normalized !== PACKAGE_PATHS.MANIFEST_RELATIVE) {\n return file;\n }\n\n try {\n const parsed = (yaml.load(file.content) as PackageYml) || { name: fallbackName };\n if ((parsed as any).partial !== undefined) {\n delete (parsed as any).partial;\n return { ...file, content: serializePackageYml(parsed) };\n }\n return file;\n } catch {\n return file;\n }\n });\n}\n\n", "import { homedir } from 'os';\nimport { join } from 'path';\nimport { exists, ensureDir, readTextFile, writeTextFile } from '../utils/fs.js';\nimport { computeGitUrlHash } from '../utils/git-cache.js';\nimport { logger } from '../utils/logger.js';\nimport { getPackageVersionPath } from './directory.js';\n\nconst METADATA_TTL_MS = 10 * 60 * 1000; // 10 minutes\n\nexport interface CachedRefEntry {\n commit: string;\n fetchedAt: string;\n}\n\nexport interface GitRefCache {\n refs: Record<string, CachedRefEntry>;\n}\n\nexport interface CachedMetadata {\n versions: string[];\n fetchedAt: string;\n etag?: 
string;\n}\n\nexport interface CacheManager {\n getCachedCommitForRef(url: string, ref: string): Promise<string | null>;\n cacheRefCommit(url: string, ref: string, commit: string): Promise<void>;\n\n getLocalPackagePath(name: string, version: string): Promise<string | null>;\n hasLocalPackage(name: string, version: string): Promise<boolean>;\n\n getCachedMetadata(name: string): Promise<CachedMetadata | null>;\n cacheMetadata(name: string, versions: string[], etag?: string): Promise<void>;\n}\n\nfunction getGitRefsCacheDir(): string {\n return join(homedir(), '.openpackage', 'cache', 'git-refs');\n}\n\nfunction getMetadataCacheDir(): string {\n return join(homedir(), '.openpackage', 'cache', 'metadata');\n}\n\nfunction getGitRefCachePath(url: string): string {\n const urlHash = computeGitUrlHash(url);\n return join(getGitRefsCacheDir(), `${urlHash}.json`);\n}\n\nfunction getMetadataCachePath(name: string): string {\n const safeName = name.replace(/\\//g, '__');\n return join(getMetadataCacheDir(), `${safeName}.json`);\n}\n\nfunction isExpired(fetchedAt: string, ttlMs: number): boolean {\n const fetchedTime = new Date(fetchedAt).getTime();\n return Date.now() - fetchedTime > ttlMs;\n}\n\nasync function readJsonFile<T>(filePath: string): Promise<T | null> {\n if (!(await exists(filePath))) {\n return null;\n }\n\n try {\n const content = await readTextFile(filePath);\n return JSON.parse(content);\n } catch (error) {\n logger.warn(`Failed to read cache file at ${filePath}`, { error });\n return null;\n }\n}\n\nasync function writeJsonFile<T>(filePath: string, data: T): Promise<void> {\n const dir = join(filePath, '..');\n await ensureDir(dir);\n await writeTextFile(filePath, JSON.stringify(data, null, 2));\n}\n\nexport function createCacheManager(): CacheManager {\n return {\n async getCachedCommitForRef(url: string, ref: string): Promise<string | null> {\n const cachePath = getGitRefCachePath(url);\n const cache = await readJsonFile<GitRefCache>(cachePath);\n\n if 
(!cache?.refs?.[ref]) {\n return null;\n }\n\n // Return cached commit - caller decides whether to trust based on ref immutability\n // (git-clone.ts only uses this for immutable refs like semver tags)\n return cache.refs[ref].commit;\n },\n\n async cacheRefCommit(url: string, ref: string, commit: string): Promise<void> {\n const cachePath = getGitRefCachePath(url);\n let cache = await readJsonFile<GitRefCache>(cachePath);\n\n if (!cache) {\n cache = { refs: {} };\n }\n\n cache.refs[ref] = {\n commit,\n fetchedAt: new Date().toISOString(),\n };\n\n await writeJsonFile(cachePath, cache);\n logger.debug(`Cached git ref ${ref} -> ${commit.substring(0, 7)}`, { url });\n },\n\n async getLocalPackagePath(name: string, version: string): Promise<string | null> {\n const packagePath = getPackageVersionPath(name, version);\n\n if (await exists(packagePath)) {\n return packagePath;\n }\n\n return null;\n },\n\n async hasLocalPackage(name: string, version: string): Promise<boolean> {\n const packagePath = getPackageVersionPath(name, version);\n return await exists(packagePath);\n },\n\n async getCachedMetadata(name: string): Promise<CachedMetadata | null> {\n const cachePath = getMetadataCachePath(name);\n const cache = await readJsonFile<CachedMetadata>(cachePath);\n\n if (!cache) {\n return null;\n }\n\n if (isExpired(cache.fetchedAt, METADATA_TTL_MS)) {\n logger.debug(`Metadata cache expired for ${name}`, { fetchedAt: cache.fetchedAt });\n return null;\n }\n\n return cache;\n },\n\n async cacheMetadata(name: string, versions: string[], etag?: string): Promise<void> {\n const cachePath = getMetadataCachePath(name);\n\n const cache: CachedMetadata = {\n versions,\n fetchedAt: new Date().toISOString(),\n ...(etag && { etag }),\n };\n\n await writeJsonFile(cachePath, cache);\n logger.debug(`Cached metadata for ${name}`, { versionCount: versions.length });\n },\n };\n}\n", "import { createHash } from 'crypto';\nimport { join, basename } from 'path';\nimport { homedir } from 
'os';\nimport { readdir } from 'fs/promises';\nimport { readTextFile, writeTextFile, exists, ensureDir } from './fs.js';\nimport { logger } from './logger.js';\nimport { normalizeGitUrl } from './git-url-parser.js';\n\n/**\n * Metadata stored at repository level.\n */\nexport interface GitRepoMetadata {\n url: string;\n normalized: string;\n lastFetched?: string;\n}\n\n/**\n * Metadata stored at commit level.\n */\nexport interface GitCommitMetadata {\n url: string;\n commit: string; // Full commit SHA\n ref?: string; // Branch/tag name if specified\n subdir?: string;\n clonedAt: string;\n lastAccessed: string;\n}\n\n/**\n * Cache entry information.\n */\nexport interface GitCacheEntry {\n urlHash: string;\n commitSha: string;\n path: string;\n metadata: GitCommitMetadata;\n}\n\n/**\n * Compute a hash of a Git URL for cache directory naming.\n * Uses 12 hex characters (48 bits) for short but collision-resistant paths.\n */\nexport function computeGitUrlHash(url: string): string {\n const normalized = normalizeGitUrl(url);\n const hash = createHash('sha256').update(normalized).digest('hex');\n \n // Use first 12 chars (48 bits)\n return hash.substring(0, 12);\n}\n\n/**\n * Get the base cache directory for Git repositories.\n * Returns: ~/.openpackage/cache/git/\n */\nexport function getGitCacheDir(): string {\n return join(homedir(), '.openpackage', 'cache', 'git');\n}\n\n/**\n * Get the cache directory path for a specific repository (by URL).\n * Returns: ~/.openpackage/cache/git/<url-hash>/\n */\nexport function getGitRepoCacheDir(url: string): string {\n const urlHash = computeGitUrlHash(url);\n const cacheDir = getGitCacheDir();\n return join(cacheDir, urlHash);\n}\n\n/**\n * Get the cache directory path for a specific commit.\n * Returns: ~/.openpackage/cache/git/<url-hash>/<commit-sha-7>/\n */\nexport function getGitCommitCacheDir(url: string, commitSha: string): string {\n const repoDir = getGitRepoCacheDir(url);\n const shortSha = commitSha.substring(0, 
7);\n return join(repoDir, shortSha);\n}\n\n/**\n * Get the full cache path including subdirectory if specified.\n * Returns: ~/.openpackage/cache/git/<url-hash>/<commit-sha-7>/<subdirectory>/\n */\nexport function getGitCachePath(\n url: string,\n commitSha: string,\n subdirectory?: string\n): string {\n const commitDir = getGitCommitCacheDir(url, commitSha);\n \n if (subdirectory) {\n return join(commitDir, subdirectory);\n }\n \n return commitDir;\n}\n\n/**\n * Get metadata file path for a repository.\n */\nfunction getRepoMetadataPath(repoDir: string): string {\n return join(repoDir, '.opkg-repo.json');\n}\n\n/**\n * Get metadata file path for a commit.\n */\nfunction getCommitMetadataPath(commitDir: string): string {\n return join(commitDir, '.opkg-commit.json');\n}\n\n/**\n * Write repository metadata.\n */\nexport async function writeRepoMetadata(\n repoDir: string,\n metadata: GitRepoMetadata\n): Promise<void> {\n const metaPath = getRepoMetadataPath(repoDir);\n await ensureDir(repoDir);\n await writeTextFile(metaPath, JSON.stringify(metadata, null, 2));\n}\n\n/**\n * Read repository metadata.\n */\nexport async function readRepoMetadata(\n repoDir: string\n): Promise<GitRepoMetadata | null> {\n const metaPath = getRepoMetadataPath(repoDir);\n \n if (!(await exists(metaPath))) {\n return null;\n }\n \n try {\n const content = await readTextFile(metaPath);\n return JSON.parse(content);\n } catch (error) {\n logger.warn(`Failed to read repo metadata at ${metaPath}`, { error });\n return null;\n }\n}\n\n/**\n * Write commit metadata.\n */\nexport async function writeCommitMetadata(\n commitDir: string,\n metadata: GitCommitMetadata\n): Promise<void> {\n const metaPath = getCommitMetadataPath(commitDir);\n await ensureDir(commitDir);\n await writeTextFile(metaPath, JSON.stringify(metadata, null, 2));\n}\n\n/**\n * Read commit metadata.\n */\nexport async function readCommitMetadata(\n commitDir: string\n): Promise<GitCommitMetadata | null> {\n const metaPath = 
getCommitMetadataPath(commitDir);\n \n if (!(await exists(metaPath))) {\n return null;\n }\n \n try {\n const content = await readTextFile(metaPath);\n return JSON.parse(content);\n } catch (error) {\n logger.warn(`Failed to read commit metadata at ${metaPath}`, { error });\n return null;\n }\n}\n\n/**\n * Update last accessed time for a cached commit.\n */\nexport async function touchCacheEntry(commitDir: string): Promise<void> {\n const metadata = await readCommitMetadata(commitDir);\n \n if (metadata) {\n metadata.lastAccessed = new Date().toISOString();\n await writeCommitMetadata(commitDir, metadata);\n }\n}\n\n/**\n * Check if a commit is already cached.\n */\nexport async function isCommitCached(url: string, commitSha: string): Promise<boolean> {\n const commitDir = getGitCommitCacheDir(url, commitSha);\n return await exists(commitDir);\n}\n\n/**\n * List all cached commits for a repository.\n */\nexport async function listRepoCachedCommits(url: string): Promise<GitCacheEntry[]> {\n const repoDir = getGitRepoCacheDir(url);\n \n if (!(await exists(repoDir))) {\n return [];\n }\n \n const entries: GitCacheEntry[] = [];\n \n try {\n const items = await readdir(repoDir);\n \n for (const item of items) {\n // Skip metadata files\n if (item.startsWith('.opkg-')) {\n continue;\n }\n \n const commitDir = join(repoDir, item);\n const metadata = await readCommitMetadata(commitDir);\n \n if (metadata) {\n const urlHash = basename(repoDir);\n entries.push({\n urlHash,\n commitSha: item,\n path: commitDir,\n metadata\n });\n }\n }\n } catch (error) {\n logger.warn(`Failed to list cached commits for ${url}`, { error });\n }\n \n return entries;\n}\n\n/**\n * List all cached Git repositories.\n */\nexport async function listAllCachedRepos(): Promise<{\n urlHash: string;\n path: string;\n metadata: GitRepoMetadata | null;\n commits: GitCacheEntry[];\n}[]> {\n const cacheDir = getGitCacheDir();\n \n if (!(await exists(cacheDir))) {\n return [];\n }\n \n const repos: {\n 
urlHash: string;\n path: string;\n metadata: GitRepoMetadata | null;\n commits: GitCacheEntry[];\n }[] = [];\n \n try {\n const items = await readdir(cacheDir);\n \n for (const urlHash of items) {\n const repoDir = join(cacheDir, urlHash);\n const metadata = await readRepoMetadata(repoDir);\n \n // Get all commits for this repo\n const commits = metadata ? await listRepoCachedCommits(metadata.url) : [];\n \n repos.push({\n urlHash,\n path: repoDir,\n metadata,\n commits\n });\n }\n } catch (error) {\n logger.warn(`Failed to list cached repos`, { error });\n }\n \n return repos;\n}\n", "/**\n * Extract a concise, user-friendly reason string from a raw error message.\n *\n * This is used to normalize low-level network / registry / fetch errors into\n * short labels that can be embedded in messages like:\n * \"Remote pull failed for `<pkg>` (reason: <reason>)\"\n */\nexport function extractRemoteErrorReason(message: string): string {\n const normalized = (message || '').trim();\n\n if (!normalized) {\n return 'unknown error';\n }\n\n // Specific phrases we already emit elsewhere\n if (normalized.includes('not found in remote registry')) {\n return 'not found in remote registry';\n }\n if (/Access denied/i.test(normalized)) {\n return 'access denied';\n }\n if (/Network error/i.test(normalized)) {\n return 'network error';\n }\n if (/Integrity check failed/i.test(normalized)) {\n return 'integrity check failed';\n }\n\n // Common network / fetch failures\n if (/fetch failed/i.test(normalized)) {\n return 'network error';\n }\n if (/network|ENOTFOUND|ECONNREFUSED|ETIMEDOUT/i.test(normalized)) {\n return 'network error';\n }\n\n // Not found / HTTP 404 style errors\n if (/not found|404/i.test(normalized)) {\n return 'not found in remote registry';\n }\n\n // Access / auth errors\n if (/access denied|unauthorized|403|401/i.test(normalized)) {\n return 'access denied';\n }\n\n // Integrity / checksum issues\n if (/integrity|checksum/i.test(normalized)) {\n return 
'integrity check failed';\n }\n\n // Fallback: return message if short enough, otherwise truncate\n if (normalized.length <= 50) {\n return normalized;\n }\n\n return `${normalized.slice(0, 47)}...`;\n}\n\n\n", "import type { RemoteBatchPullResult, RemotePullFailure } from '../remote-pull.js';\nimport { createDownloadKey } from './download-keys.js';\nimport { extractRemoteErrorReason } from '../../utils/error-reasons.js';\nimport type { OutputPort } from '../ports/output.js';\nimport { resolveOutput } from '../ports/resolve.js';\n\n/**\n * Record the outcome of a batch pull operation\n */\nexport function recordBatchOutcome(\n label: string,\n result: RemoteBatchPullResult,\n warnings: string[],\n dryRun: boolean,\n output?: OutputPort\n): void {\n const out = output ?? resolveOutput();\n if (result.warnings) {\n warnings.push(...result.warnings);\n }\n\n const successful = result.pulled.map(item => createDownloadKey(item.name, item.version));\n const failed = result.failed.map(item => ({\n key: createDownloadKey(item.name, item.version),\n error: item.error ?? 
'Unknown error'\n }));\n\n if (dryRun) {\n if (successful.length > 0) {\n out.info(`Would ${label}: ${successful.join(', ')}`);\n }\n\n if (failed.length > 0) {\n for (const failure of failed) {\n const reason = extractRemoteErrorReason(failure.error);\n const message = `Dry run: remote pull would fail for \\`${failure.key}\\` (reason: ${reason})`;\n out.warn(message);\n warnings.push(message);\n }\n }\n\n return;\n }\n\n if (successful.length > 0) {\n out.success(`${label}: ${successful.length}`);\n for (const key of successful) {\n out.info(` \u251C\u2500\u2500 ${key}`);\n }\n }\n\n if (failed.length > 0) {\n for (const failure of failed) {\n const reason = extractRemoteErrorReason(failure.error);\n const message = `Remote pull failed for \\`${failure.key}\\` (reason: ${reason})`;\n out.warn(message);\n warnings.push(message);\n }\n }\n}\n\n/**\n * Describe a remote failure in a user-friendly way\n */\nexport function describeRemoteFailure(label: string, failure: RemotePullFailure): string {\n switch (failure.reason) {\n case 'not-found':\n return `Package '${label}' not found in remote registry`;\n case 'access-denied':\n return failure.message || `Access denied pulling ${label}`;\n case 'network':\n return failure.message || `Network error pulling ${label}`;\n case 'integrity':\n return failure.message || `Integrity check failed pulling ${label}`;\n default:\n return failure.message || `Failed to pull ${label}`;\n }\n}\n", "import { SCOPED_PACKAGE_REGEX, GITHUB_PACKAGE_REGEX, normalizePackageName, validatePackageName, isScopedName } from '../../utils/package-name.js';\n\nexport { isScopedName } from '../../utils/package-name.js';\nimport { listAllPackages, getPackagePath } from '../directory.js';\nimport { exists } from '../../utils/fs.js';\nimport { configManager } from '../config.js';\nimport type { PromptPort } from '../ports/prompt.js';\nimport { resolvePrompt } from '../ports/resolve.js';\nimport { UserCancellationError } from 
'../../utils/errors.js';\n\n/**\n * Extract the local (non-scope) part from a package name.\n */\nexport function getLocalPart(name: string): string {\n const match = name.match(SCOPED_PACKAGE_REGEX);\n return match ? match[2] : name;\n}\n\n/**\n * Get all scoped package names in the local registry that share the same local name.\n */\nconst PACKAGE_LIST_CACHE_TTL_MS = 5000;\nlet cachedPackageList: string[] | null = null;\nlet cachedPackageListTimestamp = 0;\n\nasync function getCachedPackageList(): Promise<string[]> {\n const now = Date.now();\n if (cachedPackageList && now - cachedPackageListTimestamp < PACKAGE_LIST_CACHE_TTL_MS) {\n return cachedPackageList;\n }\n\n cachedPackageList = await listAllPackages();\n cachedPackageListTimestamp = now;\n return cachedPackageList;\n}\n\nexport async function findScopedVariantsInRegistry(baseName: string): Promise<string[]> {\n const normalizedBase = normalizePackageName(baseName);\n const packages = await getCachedPackageList();\n\n return packages.filter(candidate => {\n const match = candidate.match(SCOPED_PACKAGE_REGEX);\n if (!match) {\n return false;\n }\n const localPart = normalizePackageName(match[2]);\n return localPart === normalizedBase;\n });\n}\n\nasync function isPackageNameTaken(name: string): Promise<boolean> {\n const normalized = normalizePackageName(name);\n return await exists(getPackagePath(normalized));\n}\n\nfunction buildScopedNameFromScope(unscopedName: string, scope: string): string {\n const normalizedScope = normalizePackageName(scope.replace(/^@/, ''));\n const normalizedName = normalizePackageName(unscopedName);\n return `@${normalizedScope}/${normalizedName}`;\n}\n\nasync function ensureScopedNameAvailable(name: string): Promise<void> {\n try {\n validatePackageName(name);\n } catch (error) {\n throw new Error((error as Error).message.replace('%s', name));\n }\n\n if (!isScopedName(name)) {\n throw new Error('Name must be scoped (e.g. 
@scope/name)');\n }\n\n if (await isPackageNameTaken(name)) {\n throw new Error(\n `Package '${name}' already exists in local registry. Choose a different scoped name.`\n );\n }\n}\n\n/**\n * Fetch the configured default scope for a given profile (if any).\n */\nexport async function getDefaultScopeForProfile(profileName?: string): Promise<string | undefined> {\n if (!profileName) {\n return undefined;\n }\n\n const config = await configManager.getAll();\n const profileConfig = config.profiles?.[profileName];\n return profileConfig?.defaults?.scope;\n}\n\n/**\n * Suggest a scoped package name using the configured default scope.\n */\nexport async function suggestScopedNameFromConfig(\n unscopedName: string,\n profileName?: string\n): Promise<string | undefined> {\n const defaultScope = await getDefaultScopeForProfile(profileName);\n if (!defaultScope) {\n return undefined;\n }\n\n const normalizedScope = normalizePackageName(defaultScope.replace(/^@/, ''));\n const normalizedName = normalizePackageName(unscopedName);\n return `@${normalizedScope}/${normalizedName}`;\n}\n\n/**\n * Prompt user for a new scoped name and ensure it does not already exist locally.\n */\nexport async function promptForNewScopedName(\n baseName: string,\n profileName?: string,\n message?: string,\n prompt?: PromptPort\n): Promise<string> {\n const prm = prompt ?? resolvePrompt();\n const initial = await suggestScopedNameFromConfig(baseName, profileName);\n\n const scopedName = await prm.text(\n message ?? 
`Enter a scoped name for '${baseName}' (format @scope/${baseName}):`,\n {\n initial,\n validate: async (value: string) => {\n if (!value) return 'Name is required';\n try {\n await ensureScopedNameAvailable(value);\n return true;\n } catch (error) {\n return (error as Error).message;\n }\n }\n }\n );\n\n if (!scopedName) {\n throw new UserCancellationError('Operation cancelled by user');\n }\n\n return normalizePackageName(scopedName);\n}\n\n/**\n * Determine the scoped name to use when pushing an unscoped package.\n */\nexport async function resolveScopedNameForPush(\n unscopedName: string,\n profileName?: string,\n prompt?: PromptPort\n): Promise<string> {\n if (isScopedName(unscopedName)) {\n throw new Error(`Expected unscoped name, received '${unscopedName}'`);\n }\n\n return await promptForNewScopedName(\n unscopedName,\n profileName,\n `Remote registry requires a scope. Enter a scoped name for '${unscopedName}' (format @scope/${unscopedName}):`,\n prompt\n );\n}\n\nexport async function resolveScopedNameForPushWithUserScope(\n unscopedName: string,\n username: string,\n profileName?: string,\n prompt?: PromptPort\n): Promise<string> {\n if (isScopedName(unscopedName)) {\n throw new Error(`Expected unscoped name, received '${unscopedName}'`);\n }\n\n if (!username?.trim()) {\n throw new Error('Username is required to apply default scope.');\n }\n\n const prm = prompt ?? resolvePrompt();\n const normalizedName = normalizePackageName(unscopedName);\n\n const choice = await prm.select<'default' | 'custom'>(\n `Package '${normalizedName}' must be scoped before pushing. 
Choose a scope:`,\n [\n {\n title: `Use default scope @${username}`,\n value: 'default' as const,\n description: `Renames to @${username}/${normalizedName}`\n },\n {\n title: 'Enter scope...',\n value: 'custom' as const,\n description: `Enter a custom scope for ${normalizedName}`\n }\n ],\n 'Use arrow keys to select, Enter to confirm'\n );\n\n if (!choice) {\n throw new UserCancellationError('Operation cancelled by user');\n }\n\n let scope = username;\n if (choice === 'custom') {\n const profileScope = await getDefaultScopeForProfile(profileName);\n const initialScope = profileScope?.replace(/^@/, '') || username;\n\n const enteredScope = await prm.text(\n `Enter a scope (without @) for '${normalizedName}':`,\n {\n initial: initialScope,\n validate: async (value: string) => {\n if (!value) return 'Scope is required';\n\n const candidate = buildScopedNameFromScope(normalizedName, value);\n try {\n await ensureScopedNameAvailable(candidate);\n return true;\n } catch (error) {\n return (error as Error).message;\n }\n }\n }\n );\n\n if (!enteredScope) {\n throw new UserCancellationError('Operation cancelled by user');\n }\n\n scope = enteredScope;\n }\n\n const scopedName = buildScopedNameFromScope(normalizedName, scope);\n await ensureScopedNameAvailable(scopedName);\n return scopedName;\n}\n\nexport interface SaveNameResolution {\n effectiveName: string;\n selectedExistingScopedName?: string;\n newScopedName?: string;\n nameChanged: boolean;\n}\n\n/**\n * Resolve which name should be used for a save invocation, prompting when needed.\n */\nexport async function resolveEffectiveNameForSave(\n inputName: string,\n profileName?: string,\n prompt?: PromptPort\n): Promise<SaveNameResolution> {\n const prm = prompt ?? 
resolvePrompt();\n const normalizedInput = normalizePackageName(inputName);\n\n if (isScopedName(normalizedInput)) {\n return {\n effectiveName: normalizedInput,\n nameChanged: false\n };\n }\n\n const scopedVariants = await findScopedVariantsInRegistry(normalizedInput);\n if (scopedVariants.length === 0) {\n return {\n effectiveName: normalizedInput,\n nameChanged: false\n };\n }\n\n const choice = await prm.select<string>(\n `Found scoped packages matching '${normalizedInput}'. How should this save proceed?`,\n [\n ...scopedVariants.map(variant => ({\n title: `Use existing scoped package ${variant}`,\n value: variant,\n description: `Treat this package as '${variant}'`\n })),\n {\n title: 'Create a new scoped name',\n value: '__create_new_scoped__',\n description: 'Create a brand new scoped identity (will prompt for name)'\n },\n {\n title: `Keep unscoped name '${normalizedInput}'`,\n value: '__keep_unscoped__',\n description: 'Continue saving as unscoped (push will still require scoping later)'\n }\n ],\n 'Use arrow keys to select, Enter to confirm'\n );\n\n if (!choice) {\n throw new UserCancellationError('Operation cancelled by user');\n }\n\n if (choice === '__keep_unscoped__') {\n return {\n effectiveName: normalizedInput,\n nameChanged: false\n };\n }\n\n if (choice === '__create_new_scoped__') {\n const newScopedName = await promptForNewScopedName(normalizedInput, profileName, undefined, prm);\n return {\n effectiveName: newScopedName,\n newScopedName,\n nameChanged: newScopedName !== normalizedInput\n };\n }\n\n const normalizedChoice = normalizePackageName(choice);\n return {\n effectiveName: normalizedChoice,\n selectedExistingScopedName: normalizedChoice,\n nameChanged: normalizedChoice !== normalizedInput\n };\n}\n\n", "import type { PackageSourceLoader, LoadedPackage } from './base.js';\nimport type { PackageSource } from '../unified/context.js';\nimport type { InstallOptions, ExecutionContext } from '../../../types/index.js';\nimport type { 
UnifiedSpinner } from '../../ports/output.js';\nimport { SourceLoadError } from './base.js';\nimport { parsePackageYml } from '../../../utils/package-yml.js';\nimport {\n resolvePackageContentRoot,\n detectWorkspaceMutableSource,\n detectGlobalMutableSource\n} from '../local-source-resolution.js';\nimport { resolveRegistryVersion } from '../../source-resolution/resolve-registry-version.js';\nimport { hasPackageVersion } from '../../directory.js';\nimport { pullPackageFromRemote } from '../../remote-pull.js';\nimport { join } from 'path';\n\n/**\n * Loads packages from the local registry\n */\nexport class RegistrySourceLoader implements PackageSourceLoader {\n canHandle(source: PackageSource): boolean {\n return source.type === 'registry';\n }\n \n async load(\n source: PackageSource,\n options: InstallOptions,\n execContext: ExecutionContext,\n spinner?: UnifiedSpinner\n ): Promise<LoadedPackage> {\n if (!source.packageName) {\n throw new SourceLoadError(source, 'Package name is required for registry sources');\n }\n \n // Resolve version to latest when not specified (regression fix for regular registry installs)\n if (!source.version) {\n const resolved = await resolveRegistryVersion(source.packageName, {\n mode: options.resolutionMode ?? 'default',\n cwd: execContext.targetDir,\n profile: options.profile,\n apiKey: options.apiKey\n });\n source.version = resolved.version;\n }\n \n // If package is not available (workspace, global, or registry) and mode allows remote, pull first\n const mode = options.resolutionMode ?? 'default';\n const inWorkspace = await detectWorkspaceMutableSource(execContext.targetDir, source.packageName);\n const inGlobal = await detectGlobalMutableSource(source.packageName);\n const inRegistry = await hasPackageVersion(source.packageName, source.version);\n const availableLocally = !!(inWorkspace || inGlobal || inRegistry);\n\n const mutableSource = inWorkspace ?? 
inGlobal;\n if (mutableSource) {\n source.mutableSourceOverride = {\n kind: mutableSource.kind,\n packageRootDir: mutableSource.packageRootDir,\n };\n }\n\n if (!availableLocally && mode !== 'local-only') {\n const pullResult = await pullPackageFromRemote(source.packageName, source.version, {\n profile: options.profile,\n apiKey: options.apiKey,\n skipLocalCheck: mode === 'remote-primary',\n spinner\n });\n if (!pullResult.success) {\n const reason = pullResult.reason ?? 'unknown';\n const message = pullResult.message ?? 'Remote pull failed';\n throw new SourceLoadError(\n source,\n `Package ${source.packageName}@${source.version} not in local registry and remote pull failed: ${message} (reason: ${reason})`\n );\n }\n } else if (availableLocally && mode === 'remote-primary') {\n const pullResult = await pullPackageFromRemote(source.packageName, source.version, {\n profile: options.profile,\n apiKey: options.apiKey,\n skipLocalCheck: true,\n spinner\n });\n if (!pullResult.success) {\n const reason = pullResult.reason ?? 'unknown';\n const message = pullResult.message ?? 'Remote pull failed';\n throw new SourceLoadError(\n source,\n `--remote: Package ${source.packageName}@${source.version} remote pull failed: ${message} (reason: ${reason})`\n );\n }\n }\n if (!availableLocally && mode === 'local-only') {\n throw new SourceLoadError(\n source,\n `Package ${source.packageName}@${source.version} not found in local registry. 
Use default resolution (remove --local) to pull from remote.`\n );\n }\n \n try {\n // Resolve content root (use targetDir for registry location)\n const contentRoot = await resolvePackageContentRoot({\n cwd: execContext.targetDir,\n packageName: source.packageName,\n version: source.version\n });\n \n // Load package metadata\n const manifestPath = join(contentRoot, 'openpackage.yml');\n const metadata = await parsePackageYml(manifestPath);\n \n return {\n metadata,\n packageName: source.packageName,\n version: source.version,\n contentRoot,\n source: 'registry'\n };\n } catch (error) {\n throw new SourceLoadError(\n source,\n `Failed to load package ${source.packageName}@${source.version} from registry`,\n error as Error\n );\n }\n }\n \n}\n", "import { resolve, basename } from 'path';\nimport type { PackageSourceLoader, LoadedPackage } from './base.js';\nimport type { PackageSource } from '../unified/context.js';\nimport type { InstallOptions, ExecutionContext } from '../../../types/index.js';\nimport { SourceLoadError } from './base.js';\nimport { loadPackageFromPath } from '../path-package-loader.js';\nimport { detectPluginType } from '../plugin-detector.js';\nimport { detectBaseForFilepath } from '../base-detector.js';\nimport { getPlatformsState } from '../../../core/platforms.js';\nimport { logger } from '../../../utils/logger.js';\nimport { exists } from '../../../utils/fs.js';\nimport { formatNoPatternMatchError } from '../../../utils/install-error-messages.js';\n\n/**\n * Loads packages from local file paths (directories or tarballs)\n */\nexport class PathSourceLoader implements PackageSourceLoader {\n canHandle(source: PackageSource): boolean {\n return source.type === 'path';\n }\n \n async load(\n source: PackageSource,\n options: InstallOptions,\n execContext: ExecutionContext\n ): Promise<LoadedPackage> {\n if (!source.localPath) {\n throw new SourceLoadError(source, 'Local path is required for path sources');\n }\n \n try {\n // Resolve paths 
using sourceCwd for input resolution\n const resolvedPath = resolve(execContext.sourceCwd, source.localPath);\n \n // Phase 5: If manifest base is present, skip detection (reproducibility)\n let detectedBaseInfo: any = null;\n if (source.manifestBase) {\n // Use base from manifest instead of detecting\n // For path sources, manifestBase is relative to the path source itself\n const absoluteBase = resolve(resolvedPath, source.manifestBase);\n detectedBaseInfo = {\n matchType: 'manifest',\n base: absoluteBase,\n baseRelative: source.manifestBase,\n matchedPattern: null\n };\n \n source.detectedBase = absoluteBase;\n \n logger.info('Using base from manifest for path source', {\n base: source.manifestBase,\n absoluteBase\n });\n } else if (source.resourcePath) {\n // NEW: If a resource path was specified, detect base\n const platformsState = getPlatformsState(execContext.targetDir);\n const platformsConfig = platformsState.config;\n\n // For resource-centric installs, prefer detecting base from the actual resource path\n // when the resource exists under the provided localPath.\n const candidateAbsoluteResourcePath = resolve(resolvedPath, source.resourcePath);\n detectedBaseInfo = await detectBaseForFilepath(\n (await exists(candidateAbsoluteResourcePath)) ? 
candidateAbsoluteResourcePath : resolvedPath,\n platformsConfig\n );\n \n logger.info('Base detection result for path source', {\n matchType: detectedBaseInfo.matchType,\n base: detectedBaseInfo.base,\n matchedPattern: detectedBaseInfo.matchedPattern\n });\n \n // Phase 6: Enhanced error message with pattern suggestions\n if (detectedBaseInfo.matchType === 'none') {\n const resourcePath = source.resourcePath || source.localPath || '';\n const errorMessage = formatNoPatternMatchError(resourcePath, platformsConfig);\n throw new SourceLoadError(source, errorMessage);\n }\n \n // Store detected base in source\n if (detectedBaseInfo.base) {\n source.detectedBase = detectedBaseInfo.base;\n }\n }\n \n // Use detected base as content root if available\n const contentRoot = detectedBaseInfo?.base || resolvedPath;\n \n // Detect if this is a Claude Code plugin\n const pluginDetection = await detectPluginType(contentRoot);\n \n // Check if marketplace\n if (detectedBaseInfo?.matchType === 'marketplace') {\n return {\n metadata: null as any,\n packageName: '',\n version: '0.0.0',\n contentRoot,\n source: 'path',\n pluginMetadata: {\n isPlugin: true,\n pluginType: 'marketplace',\n manifestPath: pluginDetection.manifestPath || detectedBaseInfo?.manifestPath\n },\n sourceMetadata: {\n // Preserve the repo root for resource-centric installs so that\n // computePathScoping resolves resourcePath (which is relative to\n // resolvedPath) against the correct reference frame.\n ...(source.resourcePath ? 
{ repoPath: resolvedPath } : {}),\n baseDetection: detectedBaseInfo\n }\n };\n }\n \n // Build context for package loading\n // If gitSourceOverride exists, use it for proper git-based naming\n const loadContext: any = {\n repoPath: contentRoot,\n marketplaceEntry: source.pluginMetadata?.marketplaceEntry,\n resourcePath: source.resourcePath\n };\n \n if (source.gitSourceOverride) {\n loadContext.gitUrl = source.gitSourceOverride.gitUrl;\n loadContext.path = source.gitSourceOverride.gitPath;\n }\n \n // Load package from path, passing git context for proper scoping\n let sourcePackage = await loadPackageFromPath(contentRoot, loadContext);\n \n const packageName = sourcePackage.metadata.name;\n const version = sourcePackage.metadata.version || '0.0.0';\n \n // Note: Plugin transformation is handled by the main flow, not here\n return {\n metadata: sourcePackage.metadata,\n packageName,\n version,\n contentRoot,\n source: 'path',\n pluginMetadata: pluginDetection.isPlugin ? {\n isPlugin: true,\n pluginType: pluginDetection.type as any // Can be 'individual', 'marketplace', or 'marketplace-defined'\n } : undefined,\n sourceMetadata: {\n // Preserve the repo root for resource-centric installs so that\n // computePathScoping resolves resourcePath (which is relative to\n // resolvedPath) against the correct reference frame.\n ...(source.resourcePath ? { repoPath: resolvedPath } : {}),\n baseDetection: detectedBaseInfo\n }\n };\n } catch (error) {\n throw new SourceLoadError(\n source,\n `Failed to load package from path: ${source.localPath}`,\n error as Error\n );\n }\n }\n \n}\n", "/**\n * Base detection algorithm for resource-based installation.\n * \n * Detects the \"base\" directory of a resource - the parent directory\n * that serves as the package root for installation flows.\n * \n * Detection priority:\n * 1. openpackage.yml at resource root\n * 2. .claude-plugin/marketplace.json at resource root (triggers selection flow)\n * 3. 
.claude-plugin/plugin.json at resource root\n * 4. Pattern matching against platforms.jsonc (deepest match)\n * 5. Resource-direct resolution (explicit resourcePath exists on disk)\n */\n\nimport { join, resolve, dirname, relative, isAbsolute, sep } from 'path';\nimport { exists, readTextFile } from '../../utils/fs.js';\nimport { extractAllFromPatterns, findDeepestMatch, type PatternMatch } from '../../utils/pattern-matcher.js';\nimport { logger } from '../../utils/logger.js';\nimport { FILE_PATTERNS, CLAUDE_PLUGIN_PATHS } from '../../constants/index.js';\nimport { normalizePluginSource } from './plugin-sources.js';\nimport { stat } from 'fs/promises';\n\n/**\n * Result of base detection\n */\nexport interface BaseDetectionResult {\n /** Absolute path to detected base (undefined if no match) */\n base: string | undefined;\n \n /** The from pattern that matched (for pattern-based detection) */\n matchedPattern?: string;\n \n /** How the base was determined */\n matchType: \n | 'openpackage' // Found openpackage.yml\n | 'marketplace' // Found marketplace.json (needs selection)\n | 'plugin' // Found plugin.json\n | 'pattern' // Matched from pattern\n | 'ambiguous' // Multiple patterns at same depth\n | 'resource-direct' // Resolved directly from explicit resourcePath\n | 'none'; // No match found\n \n /** For ambiguous cases, all possible matches */\n ambiguousMatches?: Array<{\n pattern: string;\n base: string;\n startIndex: number;\n }>;\n \n /** Path to marketplace manifest (for marketplace detection) */\n manifestPath?: string;\n}\n\n/**\n * Detect the base directory for a resource.\n * \n * @param resourcePath - Path to the resource (relative to repoRoot, or absolute if filepath)\n * @param repoRoot - Root directory of the repository/package\n * @param platformsConfig - Platforms configuration object for pattern matching\n * @returns Base detection result\n */\nexport async function detectBase(\n resourcePath: string,\n repoRoot: string,\n platformsConfig: 
any\n): Promise<BaseDetectionResult> {\n // Resolve absolute path to resource\n const absoluteResourcePath = isAbsolute(resourcePath) \n ? resourcePath \n : resolve(repoRoot, resourcePath);\n const repoRootResolved = resolve(repoRoot);\n\n // Determine whether resourcePath points to a file or directory.\n // If it's a file, manifests must be discovered by walking up from the file's directory.\n let probeStart = absoluteResourcePath;\n let statIsDir: boolean | null = null;\n let statIsFile: boolean | null = null;\n try {\n const s = await stat(absoluteResourcePath);\n statIsDir = s.isDirectory();\n statIsFile = s.isFile();\n if (!s.isDirectory()) {\n probeStart = dirname(absoluteResourcePath);\n }\n } catch {\n // If stat fails, keep probeStart as-is and fall back to patterns.\n }\n\n const isWithinRepo = (absPath: string): boolean => {\n if (absPath === repoRootResolved) return true;\n return absPath.startsWith(`${repoRootResolved}${sep}`);\n };\n\n // Track marketplace root (do not immediately return it for file-scoped resource installs)\n // so we can attempt resolving an individual plugin base first.\n let marketplaceRoot: { base: string; manifestPath: string } | null = null;\n\n // Priority 1-3: Walk up directories from probeStart to repoRoot, preferring the deepest match.\n // This allows file resources inside a marketplace repo to resolve to the specific plugin base\n // (plugin.json) before hitting the marketplace root.\n let currentDir = probeStart;\n let previousDir = '';\n while (currentDir !== previousDir && isWithinRepo(currentDir)) {\n // Priority 1: openpackage.yml\n const openpackageYmlPath = join(currentDir, FILE_PATTERNS.OPENPACKAGE_YML);\n if (await exists(openpackageYmlPath)) {\n logger.info('Base detected via openpackage.yml', { base: currentDir });\n return {\n base: currentDir,\n matchType: 'openpackage'\n };\n }\n\n // Priority 2: marketplace.json\n const marketplacePath = join(currentDir, CLAUDE_PLUGIN_PATHS.MARKETPLACE_MANIFEST);\n if 
(await exists(marketplacePath)) {\n logger.info('Base detected via marketplace.json', { base: currentDir });\n marketplaceRoot = { base: currentDir, manifestPath: marketplacePath };\n }\n\n // Priority 3: plugin.json\n const pluginPath = join(currentDir, CLAUDE_PLUGIN_PATHS.PLUGIN_MANIFEST);\n if (await exists(pluginPath)) {\n logger.info('Base detected via plugin.json', { base: currentDir });\n return {\n base: currentDir,\n matchType: 'plugin'\n };\n }\n\n previousDir = currentDir;\n currentDir = dirname(currentDir);\n }\n\n // Marketplace-aware plugin base inference:\n // If this repo is a Claude marketplace AND the user provided a file/dir resource path within it,\n // try to resolve the plugin base from marketplace.json plugin entries (e.g. \"./plugins/unit-testing\").\n // Handles both string sources (\"./path\") and object sources ({ source: 'github', ... }).\n if (marketplaceRoot && resourcePath && !isAbsolute(resourcePath)) {\n try {\n const raw = await readTextFile(marketplaceRoot.manifestPath);\n const parsed = JSON.parse(raw) as { plugins?: Array<{ name?: string; source?: any; strict?: boolean }> };\n const plugins = Array.isArray(parsed.plugins) ? parsed.plugins : [];\n\n const normalizeRel = (value: string): string => value.replace(/\\\\/g, '/').replace(/^\\.\\/?/, '').replace(/^\\/+/, '');\n const normalizedResource = normalizeRel(resourcePath);\n\n let bestMatch: { rel: string; pluginName?: string } | null = null;\n for (const p of plugins) {\n const source = (p as any)?.source;\n if (!source) continue;\n \n // Extract relative path from any source format (string or PluginSourceObject)\n let relRaw: string | undefined;\n if (typeof source === 'string') {\n relRaw = source;\n } else {\n // Handle PluginSourceObject ({ source: 'github', ... } or { source: 'url', ... 
})\n // These point to external repos and have an optional `path` field for subdirectory.\n // For local marketplace inference, only the `path` field is meaningful as a relative\n // location within the marketplace repo. If there's no `path`, this entry points to\n // an external repo root and can't be matched against a local resourcePath.\n try {\n const normalized = normalizePluginSource(source, p?.name || 'unknown');\n if (normalized.type === 'relative-path') {\n relRaw = normalized.relativePath;\n }\n // Git-type sources (github/url) reference external repos, not local paths.\n // Skip them for local marketplace base inference.\n } catch {\n // Invalid source spec; skip this entry.\n }\n }\n \n if (!relRaw) continue;\n const rel = normalizeRel(relRaw);\n if (!rel) continue;\n if (normalizedResource === rel || normalizedResource.startsWith(`${rel}/`)) {\n if (!bestMatch || rel.length > bestMatch.rel.length) {\n bestMatch = { rel, pluginName: p?.name };\n }\n }\n }\n\n if (bestMatch) {\n const inferredBase = resolve(repoRootResolved, bestMatch.rel);\n logger.info('Base inferred from marketplace plugin entry', { base: inferredBase, plugin: bestMatch.pluginName, rel: bestMatch.rel });\n return {\n base: inferredBase,\n matchType: 'plugin'\n };\n }\n } catch (error) {\n // Ignore marketplace parsing issues; fall back to patterns/marketplace mode.\n }\n }\n\n // Priority 4: Pattern matching\n const patternResult = await detectBaseFromPatterns(resourcePath, repoRoot, platformsConfig);\n if (patternResult.matchType !== 'none') {\n return patternResult;\n }\n\n // Priority 5: Resource-direct resolution.\n // When an explicit resourcePath was provided and all other detection methods failed or\n // resolved to the repo root (marketplace fallback), resolve directly to the resource path.\n // This ensures that \"install this specific resource\" always wins over \"this is a marketplace.\"\n // A base must always be a directory, so if resourcePath points to a file, use its 
parent.\n if (resourcePath && !isAbsolute(resourcePath)) {\n const directPath = resolve(repoRootResolved, resourcePath);\n if (await exists(directPath)) {\n let directBase: string;\n try {\n const s = await stat(directPath);\n directBase = s.isDirectory() ? directPath : dirname(directPath);\n } catch {\n directBase = directPath;\n }\n logger.info('Base resolved directly from explicit resourcePath', {\n resourcePath,\n base: directBase\n });\n return {\n base: directBase,\n matchType: 'resource-direct'\n };\n }\n }\n\n // Fallback: if we discovered a marketplace root and nothing else matched, return marketplace.\n if (marketplaceRoot) {\n return {\n base: marketplaceRoot.base,\n matchType: 'marketplace',\n manifestPath: marketplaceRoot.manifestPath\n };\n }\n\n return patternResult;\n}\n\n/**\n * Detect base using pattern matching against platforms.jsonc.\n * \n * @param resourcePath - Path to the resource (relative to repoRoot)\n * @param repoRoot - Root directory of the repository\n * @param platformsConfig - Platforms configuration object\n * @returns Base detection result\n */\nasync function detectBaseFromPatterns(\n resourcePath: string,\n repoRoot: string,\n platformsConfig: any\n): Promise<BaseDetectionResult> {\n // Extract all patterns from platforms config\n const patterns = extractAllFromPatterns(platformsConfig);\n\n // Match resource path against patterns\n const result = findDeepestMatch(resourcePath, patterns);\n\n if (!result) {\n logger.warn('No pattern matched for resource', { resourcePath });\n return {\n base: undefined,\n matchType: 'none'\n };\n }\n\n // Calculate absolute base path\n const basePath = result.match.basePath \n ? 
resolve(repoRoot, result.match.basePath)\n : repoRoot;\n\n if (result.isAmbiguous && result.ambiguousMatches) {\n logger.info('Ambiguous base detected', {\n resourcePath,\n matchCount: result.ambiguousMatches.length,\n patterns: result.ambiguousMatches.map(m => m.pattern)\n });\n\n return {\n base: basePath,\n matchType: 'ambiguous',\n matchedPattern: result.match.pattern,\n ambiguousMatches: result.ambiguousMatches.map(m => ({\n pattern: m.pattern,\n base: m.basePath ? resolve(repoRoot, m.basePath) : repoRoot,\n startIndex: m.startIndex\n }))\n };\n }\n\n logger.info('Base detected via pattern matching', {\n resourcePath,\n base: basePath,\n pattern: result.match.pattern,\n startIndex: result.match.startIndex\n });\n\n return {\n base: basePath,\n matchedPattern: result.match.pattern,\n matchType: 'pattern'\n };\n}\n\n/**\n * Detect base for a file path source (local filesystem).\n * Similar to detectBase but handles local paths specially.\n * \n * @param absolutePath - Absolute path to the resource\n * @param platformsConfig - Platforms configuration object\n * @returns Base detection result\n */\nexport async function detectBaseForFilepath(\n absolutePath: string,\n platformsConfig: any\n): Promise<BaseDetectionResult> {\n // For file paths, we need to find the base by traversing up the directory tree\n // and checking each parent directory for manifest files or pattern matches\n\n let currentPath = absolutePath;\n const s = await stat(absolutePath);\n \n // If it's a file, start from its directory\n if (!s.isDirectory()) {\n currentPath = dirname(absolutePath);\n }\n\n // Check current directory for manifests\n const manifestResult = await checkForManifests(currentPath);\n if (manifestResult) {\n return manifestResult;\n }\n\n // Try pattern matching\n // For file paths, we need to extract a relative path structure to match against patterns\n // We'll traverse up the tree looking for a point where the remaining path matches a pattern\n \n return await 
detectBaseForFilepathViaPatterns(absolutePath, platformsConfig);\n}\n\n/**\n * Check a directory for manifest files.\n */\nasync function checkForManifests(dirPath: string): Promise<BaseDetectionResult | null> {\n // Check for openpackage.yml\n const openpackageYml = join(dirPath, FILE_PATTERNS.OPENPACKAGE_YML);\n if (await exists(openpackageYml)) {\n return {\n base: dirPath,\n matchType: 'openpackage'\n };\n }\n\n // Check for marketplace.json\n const marketplace = join(dirPath, CLAUDE_PLUGIN_PATHS.MARKETPLACE_MANIFEST);\n if (await exists(marketplace)) {\n return {\n base: dirPath,\n matchType: 'marketplace',\n manifestPath: marketplace\n };\n }\n\n // Check for plugin.json\n const plugin = join(dirPath, CLAUDE_PLUGIN_PATHS.PLUGIN_MANIFEST);\n if (await exists(plugin)) {\n return {\n base: dirPath,\n matchType: 'plugin'\n };\n }\n\n return null;\n}\n\n/**\n * Detect base for a file path using pattern matching.\n * Traverses up the directory tree to find a matching pattern.\n */\nasync function detectBaseForFilepathViaPatterns(\n absolutePath: string,\n platformsConfig: any\n): Promise<BaseDetectionResult> {\n const patterns = extractAllFromPatterns(platformsConfig);\n \n let currentPath = absolutePath;\n const s = await stat(absolutePath);\n \n // If it's a file, start from its directory\n if (!s.isDirectory()) {\n currentPath = dirname(absolutePath);\n }\n\n // Traverse up the directory tree\n let previousPath = '';\n while (currentPath !== previousPath) {\n // Build relative path from current directory to the resource\n const relativePath = relative(currentPath, absolutePath);\n \n if (!relativePath || relativePath === '.') {\n // Reached the resource itself\n previousPath = currentPath;\n currentPath = dirname(currentPath);\n continue;\n }\n\n // Try matching this relative path against patterns\n const result = findDeepestMatch(relativePath, patterns);\n \n if (result) {\n // Found a match!\n const basePath = result.match.basePath\n ? 
resolve(currentPath, result.match.basePath)\n : currentPath;\n\n if (result.isAmbiguous && result.ambiguousMatches) {\n return {\n base: basePath,\n matchType: 'ambiguous',\n matchedPattern: result.match.pattern,\n ambiguousMatches: result.ambiguousMatches.map(m => ({\n pattern: m.pattern,\n base: m.basePath ? resolve(currentPath, m.basePath) : currentPath,\n startIndex: m.startIndex\n }))\n };\n }\n\n return {\n base: basePath,\n matchedPattern: result.match.pattern,\n matchType: 'pattern'\n };\n }\n\n // Move up one directory\n previousPath = currentPath;\n currentPath = dirname(currentPath);\n }\n\n // No match found\n return {\n base: undefined,\n matchType: 'none'\n };\n}\n", "/**\n * Pattern matching utilities for base detection.\n * \n * Extracts patterns from platforms.jsonc and matches resource paths\n * against them using segment-indexed matching with deepest match resolution.\n */\n\nimport { minimatch } from 'minimatch';\nimport { logger } from './logger.js';\n\n/**\n * A pattern match result\n */\nexport interface PatternMatch {\n /** The pattern that matched */\n pattern: string;\n \n /** Segment index where the match begins (0-based) */\n startIndex: number;\n \n /** The matched portion of the path */\n matchedPath: string;\n \n /** The base portion (everything before the match) */\n basePath: string;\n}\n\n/**\n * Extract all \"from\" patterns from a platforms configuration object.\n * \n * @param platformsConfig - The platforms.jsonc configuration object\n * @returns Array of unique patterns\n */\nexport function extractAllFromPatterns(platformsConfig: any): string[] {\n const patterns = new Set<string>();\n\n // Global flows\n if (platformsConfig.global?.export) {\n for (const flow of platformsConfig.global.export) {\n addFlowPatterns(flow.from, patterns);\n }\n }\n\n // Platform-specific flows\n for (const [key, value] of Object.entries(platformsConfig)) {\n if (key === 'global' || key === '$schema') continue;\n \n const platformDef = value as 
any;\n if (platformDef.export) {\n for (const flow of platformDef.export) {\n addFlowPatterns(flow.from, patterns);\n }\n }\n }\n\n return Array.from(patterns);\n}\n\n/**\n * Add patterns from a flow's \"from\" field.\n * Handles string, array, and $switch expressions.\n */\nfunction addFlowPatterns(from: any, patterns: Set<string>): void {\n if (typeof from === 'string') {\n patterns.add(from);\n } else if (typeof from === 'object' && from !== null && 'pattern' in from && typeof from.pattern === 'string') {\n // Pattern object (e.g. { pattern: \"agents/**/*.md\", schema?: \"...\" })\n patterns.add(from.pattern);\n } else if (Array.isArray(from)) {\n for (const p of from) {\n if (typeof p === 'string') {\n patterns.add(p);\n } else if (typeof p === 'object' && p !== null && 'pattern' in p && typeof (p as any).pattern === 'string') {\n patterns.add((p as any).pattern);\n }\n }\n } else if (typeof from === 'object' && from.$switch) {\n // Handle $switch expressions - extract patterns from cases\n if (from.$switch.cases) {\n for (const c of from.$switch.cases) {\n if (typeof c.value === 'string') {\n patterns.add(c.value);\n } else if (Array.isArray(c.value)) {\n for (const v of c.value) {\n if (typeof v === 'string') {\n patterns.add(v);\n } else if (typeof v === 'object' && v !== null && 'pattern' in v && typeof (v as any).pattern === 'string') {\n patterns.add((v as any).pattern);\n }\n }\n } else if (typeof c.value === 'object' && c.value !== null && 'pattern' in c.value && typeof (c.value as any).pattern === 'string') {\n patterns.add((c.value as any).pattern);\n }\n }\n }\n if (from.$switch.default) {\n if (typeof from.$switch.default === 'string') {\n patterns.add(from.$switch.default);\n } else if (typeof from.$switch.default === 'object' && from.$switch.default !== null && 'pattern' in from.$switch.default && typeof (from.$switch.default as any).pattern === 'string') {\n patterns.add((from.$switch.default as any).pattern);\n }\n }\n }\n}\n\n/**\n * Match a 
resource path against an array of patterns.\n * Returns all matches with their segment indices.\n * \n * @param resourcePath - The path to match (e.g., \"plugins/ui/agents/designer.md\")\n * @param patterns - Array of glob patterns to match against\n * @returns Array of pattern matches\n */\nexport function matchPatterns(resourcePath: string, patterns: string[]): PatternMatch[] {\n const matches: PatternMatch[] = [];\n \n // Normalize path: remove leading/trailing slashes, split into segments\n const normalizedPath = resourcePath.replace(/^\\/+|\\/+$/g, '');\n const segments = normalizedPath.split('/').filter(s => s.length > 0);\n \n if (segments.length === 0) {\n return matches;\n }\n\n // Try matching each pattern\n for (const pattern of patterns) {\n // Normalize pattern\n const normalizedPattern = pattern.replace(/^\\/+|\\/+$/g, '');\n const patternSegments = normalizedPattern.split('/').filter(s => s.length > 0);\n \n if (patternSegments.length === 0) continue;\n\n // Try matching the pattern starting at each segment index\n for (let startIndex = 0; startIndex < segments.length; startIndex++) {\n const candidatePath = segments.slice(startIndex).join('/');\n \n // Use minimatch to test if the candidate path matches the pattern\n if (minimatch(candidatePath, normalizedPattern, { dot: true })) {\n const basePath = startIndex > 0 ? 
segments.slice(0, startIndex).join('/') : '';\n \n matches.push({\n pattern: normalizedPattern,\n startIndex,\n matchedPath: candidatePath,\n basePath\n });\n \n // Only record the first (earliest) match for this pattern\n break;\n }\n }\n }\n\n logger.debug('Pattern matching results', {\n resourcePath: normalizedPath,\n matchCount: matches.length,\n matches: matches.map(m => ({\n pattern: m.pattern,\n startIndex: m.startIndex,\n basePath: m.basePath\n }))\n });\n\n return matches;\n}\n\n/**\n * Select the deepest match from an array of pattern matches.\n * \n * The deepest match is the one with the highest startIndex\n * (i.e., the pattern that matches furthest from the root).\n * \n * If multiple patterns match at the same depth, returns all of them\n * as ambiguous matches.\n * \n * @param matches - Array of pattern matches\n * @returns Object with deepest match(es) and whether it's ambiguous\n */\nexport function selectDeepestMatch(matches: PatternMatch[]): {\n match: PatternMatch;\n isAmbiguous: boolean;\n ambiguousMatches?: PatternMatch[];\n} {\n if (matches.length === 0) {\n throw new Error('Cannot select deepest match from empty array');\n }\n\n if (matches.length === 1) {\n return {\n match: matches[0],\n isAmbiguous: false\n };\n }\n\n // Find the maximum start index\n const maxStartIndex = Math.max(...matches.map(m => m.startIndex));\n \n // Get all matches at that depth\n const deepestMatches = matches.filter(m => m.startIndex === maxStartIndex);\n \n if (deepestMatches.length === 1) {\n return {\n match: deepestMatches[0],\n isAmbiguous: false\n };\n }\n\n // Multiple matches at the same depth - ambiguous\n return {\n match: deepestMatches[0], // Return first as default\n isAmbiguous: true,\n ambiguousMatches: deepestMatches\n };\n}\n\n/**\n * Match a resource path and return the deepest match.\n * This is a convenience function that combines matchPatterns and selectDeepestMatch.\n * \n * @param resourcePath - The path to match\n * @param patterns - 
Array of glob patterns\n * @returns Deepest match result, or null if no matches\n */\nexport function findDeepestMatch(\n resourcePath: string,\n patterns: string[]\n): {\n match: PatternMatch;\n isAmbiguous: boolean;\n ambiguousMatches?: PatternMatch[];\n} | null {\n const matches = matchPatterns(resourcePath, patterns);\n \n if (matches.length === 0) {\n return null;\n }\n\n return selectDeepestMatch(matches);\n}\n", "/**\n * Plugin source type definitions and normalization.\n * Implements Claude Code marketplace plugin source specification.\n * \n * See: https://code.claude.com/docs/en/plugin-marketplaces#plugin-sources\n */\n\nimport { parseGitUrl } from '../../utils/git-url-parser.js';\nimport { logger } from '../../utils/logger.js';\nimport { ValidationError } from '../../utils/errors.js';\n\n/**\n * Structured source object types from Claude Code spec.\n * \n * Supported source types:\n * - Relative paths: string like \"./plugins/my-plugin\"\n * - GitHub: { source: 'github', repo: 'owner/repo', ref?, path? }\n * - Git URL: { source: 'url', url: 'https://...', ref?, path? 
}\n */\nexport type PluginSourceSpec = string | PluginSourceObject;\n\nexport type PluginSourceObject = GitHubSource | GitUrlSource;\n\nexport interface GitHubSource {\n source: 'github';\n repo: string; // \"owner/repo\" format\n ref?: string; // Optional branch/tag/sha\n path?: string; // Optional subdirectory within repo\n}\n\nexport interface GitUrlSource {\n source: 'url';\n url: string; // Full git URL\n ref?: string; // Optional branch/tag/sha\n path?: string; // Optional subdirectory\n}\n\n/**\n * Normalized plugin source for internal use.\n * All source types are converted to this common format.\n */\nexport interface NormalizedPluginSource {\n type: 'relative-path' | 'git';\n \n // For relative-path type\n relativePath?: string;\n \n // For git type (both GitHub and Git URL)\n gitUrl?: string;\n gitRef?: string;\n gitPath?: string;\n \n // Original spec for reference\n rawSource: PluginSourceSpec;\n}\n\n/**\n * Normalize a plugin source spec into a consistent internal format.\n * Handles all source types from the Claude Code marketplace spec.\n * \n * @param source - Plugin source from marketplace manifest\n * @param pluginName - Plugin name for error messages\n * @returns Normalized source\n * @throws ValidationError if source is invalid\n */\nexport function normalizePluginSource(\n source: PluginSourceSpec,\n pluginName: string\n): NormalizedPluginSource {\n if (!source) {\n throw new ValidationError(\n `Plugin '${pluginName}' missing required 'source' field`\n );\n }\n \n // Case 1: String source (relative path)\n if (typeof source === 'string') {\n return normalizeRelativePathSource(source, pluginName);\n }\n \n // Case 2: Structured source object\n const sourceObj = source as PluginSourceObject;\n \n if (!sourceObj.source) {\n throw new ValidationError(\n `Plugin '${pluginName}' has invalid source object: missing 'source' field`\n );\n }\n \n switch (sourceObj.source) {\n case 'github':\n return normalizeGitHubSource(sourceObj as GitHubSource, 
pluginName);\n \n case 'url':\n return normalizeGitUrlSource(sourceObj as GitUrlSource, pluginName);\n \n default:\n throw new ValidationError(\n `Plugin '${pluginName}' has unsupported source type: '${(sourceObj as any).source}'. ` +\n `Supported types: 'github', 'url', or relative path string`\n );\n }\n}\n\n/**\n * Normalize a relative path source.\n */\nfunction normalizeRelativePathSource(\n path: string,\n pluginName: string\n): NormalizedPluginSource {\n // Validate path doesn't traverse upward beyond marketplace root\n if (path.includes('..')) {\n throw new ValidationError(\n `Plugin '${pluginName}' source path contains '..' which is not allowed for security reasons`\n );\n }\n \n // Validate path is not absolute\n if (path.startsWith('/')) {\n throw new ValidationError(\n `Plugin '${pluginName}' source path must be relative to marketplace root, not absolute`\n );\n }\n \n // Normalize path: strip leading ./ if present\n const normalizedPath = path.startsWith('./') ? path.substring(2) : path;\n \n return {\n type: 'relative-path',\n relativePath: normalizedPath,\n rawSource: path\n };\n}\n\n/**\n * Normalize a GitHub source.\n */\nfunction normalizeGitHubSource(\n source: GitHubSource,\n pluginName: string\n): NormalizedPluginSource {\n // Validate repo format\n if (!source.repo) {\n throw new ValidationError(\n `Plugin '${pluginName}' GitHub source missing 'repo' field`\n );\n }\n \n if (!source.repo.includes('/')) {\n throw new ValidationError(\n `Plugin '${pluginName}' GitHub source 'repo' must be in 'owner/repo' format, got: '${source.repo}'`\n );\n }\n \n const parts = source.repo.split('/');\n if (parts.length !== 2 || !parts[0] || !parts[1]) {\n throw new ValidationError(\n `Plugin '${pluginName}' GitHub source 'repo' must be in 'owner/repo' format, got: '${source.repo}'`\n );\n }\n \n // Convert to full git URL\n const gitUrl = `https://github.com/${source.repo}.git`;\n \n return {\n type: 'git',\n gitUrl,\n gitRef: source.ref,\n gitPath: 
source.path,\n rawSource: source\n };\n}\n\n/**\n * Normalize a Git URL source.\n */\nfunction normalizeGitUrlSource(\n source: GitUrlSource,\n pluginName: string\n): NormalizedPluginSource {\n // Validate URL field exists\n if (!source.url) {\n throw new ValidationError(\n `Plugin '${pluginName}' Git URL source missing 'url' field`\n );\n }\n \n // Validate URL format by attempting to parse it\n try {\n parseGitUrl(source.url);\n } catch (error) {\n throw new ValidationError(\n `Plugin '${pluginName}' has invalid Git URL: ${source.url}. ` +\n `Error: ${error instanceof Error ? error.message : String(error)}`\n );\n }\n \n return {\n type: 'git',\n gitUrl: source.url,\n gitRef: source.ref,\n gitPath: source.path,\n rawSource: source\n };\n}\n\n/**\n * Check if a normalized source is a relative path.\n */\nexport function isRelativePathSource(source: NormalizedPluginSource): boolean {\n return source.type === 'relative-path';\n}\n\n/**\n * Check if a normalized source is a git source (GitHub or Git URL).\n */\nexport function isGitSource(source: NormalizedPluginSource): boolean {\n return source.type === 'git';\n}\n", "/**\n * Phase 6: Enhanced error messages for resource installation.\n * \n * Provides helpful, actionable error messages with suggestions\n * for common failure scenarios.\n */\n\nimport { extractAllFromPatterns } from './pattern-matcher.js';\n\n/**\n * Format an error message for when no pattern matches a resource path.\n * Provides helpful suggestions based on the path structure.\n * \n * @param resourcePath - The path that didn't match\n * @param platformsConfig - Platforms configuration for pattern extraction\n * @returns Formatted error message with suggestions\n */\nexport function formatNoPatternMatchError(\n resourcePath: string,\n platformsConfig: any\n): string {\n // Extract patterns from platforms config\n const patterns = extractAllFromPatterns(platformsConfig);\n const uniquePatterns = Array.from(new Set(patterns));\n \n // Analyze the 
path to provide specific suggestions\n const pathSegments = resourcePath.split('/').filter(s => s.length > 0);\n const suggestions: string[] = [];\n \n // Check if path contains common directory names\n if (pathSegments.some(s => s === 'agents' || s.includes('agent'))) {\n suggestions.push('\u2022 Did you mean to install an agent? Ensure your path follows the agents/**/*.md pattern');\n }\n \n if (pathSegments.some(s => s === 'skills' || s.includes('skill'))) {\n suggestions.push('\u2022 Did you mean to install a skill? Ensure your path follows the skills/**/* pattern');\n }\n \n if (pathSegments.some(s => s === 'rules' || s.includes('rule'))) {\n suggestions.push('\u2022 Did you mean to install a rule? Ensure your path follows the rules/**/*.md pattern');\n }\n \n if (pathSegments.some(s => s === 'commands' || s.includes('command'))) {\n suggestions.push('\u2022 Did you mean to install a command? Ensure your path follows the commands/**/*.md pattern');\n }\n \n // Build the error message\n let message = `Path '${resourcePath}' does not match any installable pattern.\\n\\n`;\n message += `Installable patterns include:\\n`;\n \n // Show most common patterns first\n const commonPatterns = [\n 'agents/**/*.md',\n 'skills/**/*',\n 'rules/**/*.md',\n 'commands/**/*.md'\n ];\n \n for (const pattern of commonPatterns) {\n if (uniquePatterns.includes(pattern)) {\n message += ` \u2022 ${pattern}\\n`;\n }\n }\n \n // Show other patterns if they exist\n const otherPatterns = uniquePatterns.filter(p => !commonPatterns.includes(p));\n if (otherPatterns.length > 0 && otherPatterns.length <= 5) {\n for (const pattern of otherPatterns) {\n message += ` \u2022 ${pattern}\\n`;\n }\n } else if (otherPatterns.length > 5) {\n message += ` \u2022 ... 
and ${otherPatterns.length} more patterns\\n`;\n }\n \n if (suggestions.length > 0) {\n message += `\\n\uD83D\uDCA1 Suggestions:\\n`;\n message += suggestions.join('\\n');\n }\n \n return message;\n}\n\n/**\n * Format an error message for when a resource is not found.\n * \n * @param resourceName - Name of the resource\n * @param resourceType - Type of resource (agent, skill, etc.)\n * @param availableResources - List of available resources (if any)\n * @returns Formatted error message\n */\nexport function formatResourceNotFoundError(\n resourceName: string,\n resourceType: 'agent' | 'skill' | 'plugin',\n availableResources?: string[]\n): string {\n let message = `${resourceType.charAt(0).toUpperCase() + resourceType.slice(1)} '${resourceName}' not found.\\n`;\n \n if (availableResources && availableResources.length > 0) {\n message += `\\nAvailable ${resourceType}s:\\n`;\n const sortedResources = [...availableResources].sort();\n for (const resource of sortedResources.slice(0, 10)) {\n message += ` \u2022 ${resource}\\n`;\n }\n if (sortedResources.length > 10) {\n message += ` \u2022 ... 
and ${sortedResources.length - 10} more\\n`;\n }\n \n // Try to find similar names\n const similar = findSimilarNames(resourceName, availableResources);\n if (similar.length > 0) {\n message += `\\n\uD83D\uDCA1 Did you mean:\\n`;\n for (const name of similar.slice(0, 3)) {\n message += ` \u2022 ${name}\\n`;\n }\n }\n }\n \n return message;\n}\n\n/**\n * Find names similar to the target name using simple string distance.\n * \n * @param target - The target name to match\n * @param candidates - List of candidate names\n * @returns Array of similar names (up to 3)\n */\nfunction findSimilarNames(target: string, candidates: string[]): string[] {\n const targetLower = target.toLowerCase();\n \n // Score each candidate\n const scored = candidates.map(candidate => ({\n name: candidate,\n score: calculateSimilarity(targetLower, candidate.toLowerCase())\n }));\n \n // Sort by score (higher is better)\n scored.sort((a, b) => b.score - a.score);\n \n // Return top matches with score > 0.5\n return scored\n .filter(s => s.score > 0.5)\n .slice(0, 3)\n .map(s => s.name);\n}\n\n/**\n * Calculate similarity between two strings.\n * Uses a simple character-based similarity metric.\n * \n * @param a - First string\n * @param b - Second string\n * @returns Similarity score (0-1, higher is more similar)\n */\nfunction calculateSimilarity(a: string, b: string): number {\n if (a === b) return 1.0;\n if (a.length === 0 || b.length === 0) return 0.0;\n \n // Check for substring match\n if (a.includes(b) || b.includes(a)) {\n return 0.8;\n }\n \n // Simple character overlap metric\n const aChars = new Set(a.split(''));\n const bChars = new Set(b.split(''));\n const overlap = [...aChars].filter(c => bChars.has(c)).length;\n const total = Math.max(aChars.size, bChars.size);\n \n return overlap / total;\n}\n\n/**\n * Format an error message for version specification on sub-paths.\n * \n * @param input - The invalid input string\n * @returns Formatted error message with correct syntax\n 
*/\nexport function formatVersionOnSubPathError(input: string): string {\n // Try to extract the parts to provide a corrected example\n const parts = input.split('@');\n let suggestion = '';\n \n if (parts.length >= 2) {\n // Assume format like: gh@owner/repo/path@version\n const beforeVersion = parts.slice(0, -1).join('@');\n const version = parts[parts.length - 1];\n const versionPart = version.split('/')[0];\n \n // Try to reconstruct: gh@owner/repo@version/path\n const segments = beforeVersion.split('/');\n if (segments.length >= 3) {\n suggestion = `\\n\\nDid you mean: ${segments.slice(0, 3).join('/')}@${versionPart}/${segments.slice(3).join('/')}`;\n }\n }\n \n let message = `Version cannot be specified on sub-paths.\\n\\n`;\n message += `Got: ${input}\\n`;\n message += `Valid format: <package>[@version][/path]\\n`;\n message += `Examples:\\n`;\n message += ` \u2022 gh@owner/repo@v1.0.0/agents/designer.md\\n`;\n message += ` \u2022 my-package@1.0.0/skills/git\\n`;\n message += ` \u2022 @scope/package@2.0.0/agents/architect\\n`;\n \n if (suggestion) {\n message += suggestion;\n }\n \n return message;\n}\n\n\n", "import { execFile } from 'child_process';\nimport { join } from 'path';\nimport { promisify } from 'util';\nimport { rm, rename } from 'fs/promises';\n\nimport { logger } from '../utils/logger.js';\nimport { ValidationError } from '../utils/errors.js';\nimport { exists, ensureDir } from '../utils/fs.js';\nimport { DIR_PATTERNS, FILE_PATTERNS } from '../constants/index.js';\nimport type { OutputPort, UnifiedSpinner } from './ports/output.js';\n\n/**\n * Create a spinner for git operations.\n * When an OutputPort is provided, uses a real animated spinner.\n * Otherwise falls back to debug-level logger (no terminal animation).\n */\nfunction createGitSpinner(output?: OutputPort): UnifiedSpinner {\n if (output) {\n return output.spinner();\n }\n // Fallback: logger-only spinner (core package default, no terminal dependency)\n return {\n start(message: 
string) { logger.debug(message); },\n stop(_finalMessage?: string) { /* no-op */ },\n message(text: string) { logger.debug(text); },\n };\n}\n\nimport {\n getGitCommitCacheDir,\n getGitCachePath,\n getGitRepoCacheDir,\n writeRepoMetadata,\n writeCommitMetadata,\n readCommitMetadata,\n touchCacheEntry,\n isCommitCached\n} from '../utils/git-cache.js';\nimport { createCacheManager } from './cache-manager.js';\n\nconst execFileAsync = promisify(execFile);\nconst cacheManager = createCacheManager();\n\nexport interface GitCloneOptions {\n url: string;\n ref?: string; // branch/tag/sha\n subdir?: string; // subdir within repository\n skipCache?: boolean; // Force fresh clone, bypass ref cache (for --remote flag)\n output?: OutputPort; // Optional output port for spinner feedback\n spinner?: UnifiedSpinner; // Optional pre-existing spinner to reuse (avoids nested spinners)\n}\n\nexport interface GitCloneResult {\n path: string; // Full path to clone (including subdir if specified)\n commitSha: string; // Resolved commit SHA (7 chars)\n repoPath: string; // Path to repository root\n}\n\nfunction isSha(ref: string): boolean {\n return /^[0-9a-f]{7,40}$/i.test(ref);\n}\n\nfunction isFullSha(ref: string): boolean {\n return /^[0-9a-f]{40}$/i.test(ref);\n}\n\nasync function runGit(args: string[], cwd?: string): Promise<string> {\n try {\n const result = await execFileAsync('git', args, { cwd });\n return result.stdout.trim();\n } catch (error: any) {\n const message = error?.stderr?.toString?.().trim?.() || error?.message || String(error);\n throw new ValidationError(`Git command failed: ${message}`);\n }\n}\n\n/**\n * Get the current commit SHA of a Git repository.\n */\nasync function getCurrentCommitSha(repoPath: string): Promise<string> {\n const fullSha = await runGit(['rev-parse', 'HEAD'], repoPath);\n return fullSha.substring(0, 7);\n}\n\n/**\n * Resolve a ref to a commit SHA using ls-remote (without cloning).\n * Returns the 7-char short SHA or null if resolution 
fails.\n * \n * When ref is undefined or 'HEAD', resolves the default branch.\n */\nasync function resolveRefWithLsRemote(url: string, ref?: string): Promise<string | null> {\n try {\n const targetRef = ref || 'HEAD';\n const output = await runGit(['ls-remote', url, targetRef]);\n if (!output) {\n return null;\n }\n const match = output.match(/^([0-9a-f]{40})\\s/i);\n return match ? match[1].substring(0, 7) : null;\n } catch {\n return null;\n }\n}\n\nconst SEMVER_TAG_PATTERN = /^v?\\d+\\.\\d+\\.\\d+(?:[-+].*)?$/;\n\n/**\n * Check if a ref is immutable (full SHA or semver tag).\n * Immutable refs never change, so we can trust cached mappings forever.\n */\nfunction isImmutableRef(ref: string): boolean {\n return isFullSha(ref) || SEMVER_TAG_PATTERN.test(ref);\n}\n\n/**\n * Clone a Git repository to the structured cache.\n * Uses shallow clones (--depth 1) for space efficiency.\n * \n * Cache structure:\n * ~/.openpackage/cache/git/<url-hash>/<commit-sha-7>/\n * \n * Returns the path to the cloned repository (or subdir if specified).\n */\nexport async function cloneRepoToCache(options: GitCloneOptions): Promise<GitCloneResult> {\n const { url, ref, subdir, skipCache, output, spinner: externalSpinner } = options;\n \n // Format URL for display (extract repo name from URL)\n const getDisplayUrl = () => {\n const match = url.match(/([^/]+\\/[^/]+?)(?:\\.git)?$/);\n return match ? match[1] : url;\n };\n \n // Helper to check cache and return result if hit\n const tryCache = async (shortSha: string, source: string): Promise<GitCloneResult | null> => {\n if (await isCommitCached(url, shortSha)) {\n const commitDir = getGitCommitCacheDir(url, shortSha);\n await touchCacheEntry(commitDir);\n const finalPath = subdir ? join(commitDir, subdir) : commitDir;\n if (!subdir || await exists(finalPath)) {\n logger.debug(`Using cached commit (${source})`, { url, ref, commit: shortSha });\n // Show cache hit to user via OutputPort\n const refDisplay = ref ? 
`#${ref}` : '';\n const subdirDisplay = subdir ? `/${subdir}` : '';\n logger.info(`Using cached ${getDisplayUrl()}${refDisplay}${subdirDisplay} [${shortSha}]`);\n return { path: finalPath, commitSha: shortSha, repoPath: commitDir };\n }\n }\n return null;\n };\n \n // CACHE CHECK: Skip if skipCache is set (--remote flag forces fresh fetch)\n if (!skipCache) {\n // Case 1: Full SHA provided - check cache directly (no network needed)\n if (ref && isFullSha(ref)) {\n const shortSha = ref.substring(0, 7);\n const cached = await tryCache(shortSha, 'full SHA');\n if (cached) return cached;\n }\n \n // Case 2: Immutable ref (semver tag) - trust ref cache without TTL\n if (ref && isImmutableRef(ref) && !isFullSha(ref)) {\n const cachedCommit = await cacheManager.getCachedCommitForRef(url, ref);\n if (cachedCommit) {\n const cached = await tryCache(cachedCommit, 'immutable ref cache');\n if (cached) return cached;\n }\n }\n \n // Case 3: Mutable ref (branch) or no ref (default branch)\n // Always do ls-remote to get current SHA, then check cache\n // This is the key fix: we now handle ref === undefined\n const refDisplay = ref ? `#${ref}` : '';\n const ownedLsRemoteSpinner = !externalSpinner ? 
createGitSpinner(output) : undefined;\n \n if (externalSpinner) {\n externalSpinner.message(`Checking ${getDisplayUrl()}${refDisplay}`);\n } else {\n ownedLsRemoteSpinner!.start(`Checking ${getDisplayUrl()}${refDisplay}`);\n }\n \n const resolvedSha = await resolveRefWithLsRemote(url, ref);\n ownedLsRemoteSpinner?.stop();\n \n if (resolvedSha) {\n // Update ref cache for future lookups\n if (ref) {\n await cacheManager.cacheRefCommit(url, ref, resolvedSha);\n }\n \n const cached = await tryCache(resolvedSha, 'ls-remote');\n if (cached) return cached;\n \n // SHA resolved but not cached - will need to clone\n logger.debug('Commit not cached, will clone', { url, ref, resolvedSha });\n }\n }\n \n // Clone to a temporary commit directory (we'll get the actual SHA after cloning)\n const repoDir = getGitRepoCacheDir(url);\n await ensureDir(repoDir);\n \n // Write repo metadata\n await writeRepoMetadata(repoDir, {\n url,\n normalized: url.toLowerCase(),\n lastFetched: new Date().toISOString()\n });\n \n // Create a unique temporary clone location.\n // IMPORTANT: recursive dependency resolution may clone multiple resources from the same repo in parallel.\n // A fixed temp dir causes collisions (\"File exists\") and leaves partial clones behind.\n const tempClonePath = join(\n repoDir,\n `.temp-clone-${process.pid}-${Date.now()}-${Math.random().toString(16).slice(2, 10)}`\n );\n \n logger.debug(`Cloning repository to cache`, { url, ref, subdir });\n \n // Create spinner for git operations\n // When an external spinner is provided, reuse it (only update messages).\n // Otherwise create and own a new spinner.\n const cloneRefDisplay = ref ? `#${ref}` : '';\n const ownedSpinner = !externalSpinner ? createGitSpinner(output) : undefined;\n const spinner = externalSpinner ?? 
ownedSpinner!;\n \n if (externalSpinner) {\n spinner.message(`Cloning ${getDisplayUrl()}${cloneRefDisplay}`);\n } else {\n spinner.start(`Cloning ${getDisplayUrl()}${cloneRefDisplay}`);\n }\n \n try {\n // Clone repository\n if (ref && isSha(ref)) {\n // SHA: shallow clone default branch, then fetch the sha\n await runGit(['clone', '--depth', '1', url, tempClonePath]);\n spinner.message(`Fetching commit ${ref}`);\n await runGit(['fetch', '--depth', '1', 'origin', ref], tempClonePath);\n spinner.message(`Checking out commit ${ref}`);\n await runGit(['checkout', ref], tempClonePath);\n } else if (ref) {\n // Branch or tag\n await runGit(['clone', '--depth', '1', '--branch', ref, url, tempClonePath]);\n } else {\n // Default branch\n await runGit(['clone', '--depth', '1', url, tempClonePath]);\n }\n \n // Get the actual commit SHA\n spinner.message('Resolving commit SHA');\n const commitSha = await getCurrentCommitSha(tempClonePath);\n const commitDir = getGitCommitCacheDir(url, commitSha);\n \n // Check if this commit is already cached\n if (await isCommitCached(url, commitSha)) {\n logger.debug(`Commit already cached, using existing`, { commitSha, commitDir });\n \n ownedSpinner?.stop();\n \n // Clean up temp clone\n await rm(tempClonePath, { recursive: true, force: true });\n \n // Update access time\n await touchCacheEntry(commitDir);\n \n // Validate subdir if specified\n const finalPath = subdir ? 
join(commitDir, subdir) : commitDir;\n if (subdir && !(await exists(finalPath))) {\n throw new ValidationError(\n `Subdirectory '${subdir}' does not exist in cached repository ${url}`\n );\n }\n \n return {\n path: finalPath,\n commitSha,\n repoPath: commitDir\n };\n }\n \n // Move temp clone to final location.\n // If another parallel clone won the race and created the commitDir, fall back to using the cached copy.\n try {\n await rename(tempClonePath, commitDir);\n } catch (error: any) {\n const code = error?.code as string | undefined;\n if (code === 'EEXIST' || code === 'ENOTEMPTY') {\n // Another process wrote this commit in parallel.\n await rm(tempClonePath, { recursive: true, force: true });\n } else {\n throw error;\n }\n }\n \n logger.debug(`Moved clone to final cache location`, { commitDir });\n \n // If a parallel clone created commitDir, ensure we treat it as cached from here on.\n if (await isCommitCached(url, commitSha)) {\n await touchCacheEntry(commitDir);\n }\n \n // Write commit metadata\n await writeCommitMetadata(commitDir, {\n url,\n commit: commitSha,\n ref,\n subdir,\n clonedAt: new Date().toISOString(),\n lastAccessed: new Date().toISOString()\n });\n \n // Cache the ref->commit mapping for future lookups\n if (ref) {\n await cacheManager.cacheRefCommit(url, ref, commitSha);\n }\n \n // Validate subdir if specified\n const finalPath = subdir ? 
join(commitDir, subdir) : commitDir;\n if (subdir && !(await exists(finalPath))) {\n throw new ValidationError(\n `Subdirectory '${subdir}' does not exist in cloned repository ${url}`\n );\n }\n \n // Validate that it's an OpenPackage or Claude Code plugin\n const manifestPath = join(finalPath, FILE_PATTERNS.OPENPACKAGE_YML);\n const hasManifest = await exists(manifestPath);\n \n const pluginManifestPath = join(finalPath, DIR_PATTERNS.CLAUDE_PLUGIN, FILE_PATTERNS.PLUGIN_JSON);\n const hasPluginManifest = await exists(pluginManifestPath);\n \n const marketplaceManifestPath = join(finalPath, DIR_PATTERNS.CLAUDE_PLUGIN, FILE_PATTERNS.MARKETPLACE_JSON);\n const hasMarketplaceManifest = await exists(marketplaceManifestPath);\n \n if (!hasManifest && !hasPluginManifest && !hasMarketplaceManifest) {\n throw new ValidationError(\n `Cloned repository is not an OpenPackage or Claude Code plugin ` +\n `(missing ${FILE_PATTERNS.OPENPACKAGE_YML}, ${DIR_PATTERNS.CLAUDE_PLUGIN}/${FILE_PATTERNS.PLUGIN_JSON}, or ${DIR_PATTERNS.CLAUDE_PLUGIN}/${FILE_PATTERNS.MARKETPLACE_JSON} ` +\n `at ${subdir ? `subdir '${subdir}'` : 'repository root'})`\n );\n }\n \n const refPart = ref ? `#${ref}` : '';\n const subdirPart = subdir ? 
`&subdirectory=${subdir}` : '';\n \n if (externalSpinner) {\n spinner.message(`Cloned ${getDisplayUrl()}${refPart} [${commitSha}]`);\n } else {\n ownedSpinner!.stop(`Cloned ${getDisplayUrl()}${refPart} [${commitSha}]`);\n }\n \n logger.info(`Cloned git repository ${url}${refPart}${subdirPart} to cache [${commitSha}]`);\n \n return {\n path: finalPath,\n commitSha,\n repoPath: commitDir\n };\n \n } catch (error) {\n ownedSpinner?.stop();\n // Clean up temp clone on error\n if (await exists(tempClonePath)) {\n await rm(tempClonePath, { recursive: true, force: true });\n }\n throw error;\n }\n}\n\n/**\n * Legacy alias for backward compatibility.\n * @deprecated Use cloneRepoToCache instead.\n */\nexport async function cloneRepoToTempDir(options: GitCloneOptions): Promise<string> {\n const result = await cloneRepoToCache(options);\n return result.path;\n}\n", "import { cloneRepoToCache } from '../git-clone.js';\nimport { loadPackageFromPath } from './path-package-loader.js';\nimport { detectPluginType } from './plugin-detector.js';\nimport type { Package } from '../../types/index.js';\nimport type { UnifiedSpinner } from '../ports/output.js';\n\nexport interface GitPackageLoadOptions {\n url: string;\n ref?: string;\n path?: string;\n resourcePath?: string;\n skipCache?: boolean; // Force fresh clone (for --remote flag)\n spinner?: UnifiedSpinner; // Optional pre-existing spinner to reuse\n}\n\nexport interface GitPackageLoadResult {\n pkg: Package | null;\n sourcePath: string;\n repoPath: string;\n commitSha: string;\n isMarketplace: boolean;\n}\n\nexport async function loadPackageFromGit(options: GitPackageLoadOptions): Promise<GitPackageLoadResult> {\n const cloneResult = await cloneRepoToCache({ \n url: options.url, \n ref: options.ref,\n subdir: options.path,\n skipCache: options.skipCache,\n spinner: options.spinner\n });\n \n const { path: sourcePath, repoPath, commitSha } = cloneResult;\n \n // If the caller provided a resourcePath, we must NOT treat a 
repo-root marketplace\n // as \"install a marketplace\" yet. The upper layer will detect base from the resource path\n // and then load the specific plugin/package base (avoids marketplace selection prompt).\n if (options.resourcePath) {\n return {\n pkg: null,\n sourcePath,\n repoPath,\n commitSha,\n isMarketplace: false\n };\n }\n\n // Check if this is a marketplace first - marketplaces don't have openpackage.yml\n // and need to be handled differently\n const pluginDetection = await detectPluginType(sourcePath);\n if (pluginDetection.isPlugin && pluginDetection.type === 'marketplace') {\n return { \n pkg: null, \n sourcePath, \n repoPath,\n commitSha,\n isMarketplace: true \n };\n }\n \n // Not a marketplace, load as regular package or individual plugin\n // Pass GitHub context for scoped naming\n const pkg = await loadPackageFromPath(sourcePath, {\n gitUrl: options.url,\n path: options.path,\n resourcePath: options.resourcePath,\n repoPath\n });\n \n return { \n pkg, \n sourcePath, \n repoPath,\n commitSha,\n isMarketplace: false \n };\n}\n", "import type { PackageSourceLoader, LoadedPackage } from './base.js';\nimport type { PackageSource } from '../unified/context.js';\nimport type { InstallOptions, ExecutionContext } from '../../../types/index.js';\nimport type { UnifiedSpinner } from '../../ports/output.js';\nimport { SourceLoadError } from './base.js';\nimport { loadPackageFromGit } from '../git-package-loader.js';\nimport { loadPackageFromPath } from '../path-package-loader.js';\nimport { detectPluginType } from '../plugin-detector.js';\nimport { detectBase } from '../base-detector.js';\nimport { getPlatformsState } from '../../../core/platforms.js';\nimport { logger } from '../../../utils/logger.js';\nimport { stat } from 'fs/promises';\nimport { resolve, dirname } from 'path';\n\n/**\n * Loads packages from git repositories\n */\nexport class GitSourceLoader implements PackageSourceLoader {\n canHandle(source: PackageSource): boolean {\n return 
source.type === 'git';\n }\n \n async load(\n source: PackageSource,\n options: InstallOptions,\n execContext: ExecutionContext,\n spinner?: UnifiedSpinner\n ): Promise<LoadedPackage> {\n if (!source.gitUrl) {\n throw new SourceLoadError(source, 'Git URL is required for git sources');\n }\n \n try {\n // Load package from git\n // Use skipCache when resolutionMode is 'remote-primary' (--remote flag)\n const skipCache = options.resolutionMode === 'remote-primary';\n const result = await loadPackageFromGit({\n url: source.gitUrl,\n ref: source.gitRef,\n path: source.gitPath,\n resourcePath: source.resourcePath,\n skipCache,\n spinner\n });\n \n // Phase 5: If manifest base is present, skip detection (reproducibility)\n let detectedBaseInfo: any = null;\n if (source.manifestBase) {\n // Use base from manifest instead of detecting\n const absoluteBase = resolve(result.repoPath, source.manifestBase);\n detectedBaseInfo = {\n matchType: 'manifest',\n base: absoluteBase,\n baseRelative: source.manifestBase,\n matchedPattern: null\n };\n \n source.detectedBase = absoluteBase;\n \n logger.info('Using base from manifest for git source', {\n base: source.manifestBase,\n absoluteBase\n });\n } else if (source.resourcePath || source.gitPath) {\n // NEW: If a resource path was specified, detect base\n const platformsState = getPlatformsState(execContext.targetDir);\n const platformsConfig = platformsState.config;\n const pathToDetect = source.resourcePath || source.gitPath || '';\n \n detectedBaseInfo = await detectBase(\n pathToDetect,\n result.repoPath,\n platformsConfig\n );\n \n logger.info('Base detection result for git source', {\n matchType: detectedBaseInfo.matchType,\n base: detectedBaseInfo.base,\n matchedPattern: detectedBaseInfo.matchedPattern\n });\n \n // Store detected base in source\n if (detectedBaseInfo.base) {\n source.detectedBase = detectedBaseInfo.base;\n }\n }\n \n // When resourcePath is set, treat as concrete resource install (no marketplace 
selection).\n // Otherwise, if repo is a marketplace, return placeholder for selection flow.\n if (\n !source.resourcePath &&\n (result.isMarketplace || detectedBaseInfo?.matchType === 'marketplace')\n ) {\n const pluginDetection = await detectPluginType(result.sourcePath);\n \n return {\n metadata: null as any, // Marketplace doesn't have single package\n packageName: '', // Unknown until plugin selection\n version: '0.0.0',\n contentRoot: result.sourcePath,\n source: 'git',\n pluginMetadata: {\n isPlugin: true,\n pluginType: 'marketplace',\n manifestPath: pluginDetection.manifestPath || detectedBaseInfo?.manifestPath\n },\n sourceMetadata: {\n repoPath: result.repoPath,\n commitSha: result.commitSha,\n baseDetection: detectedBaseInfo\n }\n };\n }\n \n // Determine content root.\n // When resourcePath is specified, it is authoritative \u2014 the user explicitly requested\n // this specific resource. The detected base serves as validation/metadata, not resolution.\n //\n // Use detectedBase when it has a meaningful containment relationship with resourceRoot:\n // - detectedBase is at or within resourceRoot (base detection found something more\n // specific, e.g. a nested plugin.json)\n // - resourceRoot is within detectedBase AND detectedBase is NOT the repo root\n // (base detection found the plugin/package root that contains the resource file,\n // e.g. plugin.json at plugins/codebase-cleanup while resource is\n // plugins/codebase-cleanup/agents/code-reviewer.md)\n //\n // The repo root is explicitly excluded from the ancestor check because every\n // resourceRoot trivially starts with repoPath + '/'. When detectedBase collapses\n // to the repo root (e.g. 
marketplace or top-level pattern match), it does not\n // represent a meaningful package base for the specific resource.\n //\n // Fall back to resourceRoot when detectedBase is unrelated, absent, or the repo root.\n // If resourceRoot is a file, use its parent directory since a content root must be\n // a directory.\n let contentRoot: string;\n if (source.resourcePath) {\n const resourceRoot = resolve(result.repoPath, source.resourcePath);\n const detectedBase = detectedBaseInfo?.base;\n \n if (detectedBase && (\n detectedBase.startsWith(resourceRoot) || // detectedBase at or within resourceRoot\n ( // resourceRoot within detectedBase (file-in-plugin)\n detectedBase !== result.repoPath && // but NOT the repo root (trivially matches everything)\n resourceRoot.startsWith(detectedBase + '/')\n )\n )) {\n // Detected base has a meaningful containment relationship \u2014 use it (always a directory).\n contentRoot = detectedBase;\n } else if (detectedBase && detectedBase !== result.repoPath) {\n // Detected base is a meaningful directory that isn't the repo root.\n // This handles cases where base detection found a valid package root\n // that doesn't strictly contain resourceRoot but is still relevant.\n contentRoot = detectedBase;\n } else {\n // Detected base is absent, is the repo root (marketplace/pattern collapsed),\n // or has no relationship. Fall back to resourceRoot, using dirname if it's a file.\n try {\n const s = await stat(resourceRoot);\n contentRoot = s.isDirectory() ? 
resourceRoot : dirname(resourceRoot);\n } catch {\n // If stat fails (path doesn't exist), use dirname as safe default\n contentRoot = dirname(resourceRoot);\n }\n }\n \n logger.info('Content root resolved via resourcePath', {\n resourcePath: source.resourcePath,\n detectedBase,\n contentRoot\n });\n } else {\n // No resourcePath: use detected base or sourcePath as before.\n contentRoot = detectedBaseInfo?.base || result.sourcePath;\n }\n \n // Load individual package/plugin\n let sourcePackage = await loadPackageFromPath(contentRoot, {\n gitUrl: source.gitUrl,\n path: source.gitPath,\n resourcePath: source.resourcePath,\n repoPath: result.repoPath,\n marketplaceEntry: source.pluginMetadata?.marketplaceEntry\n });\n \n // Detect plugin type at content root\n const pluginDetection = await detectPluginType(contentRoot);\n \n // When resourcePath is set, the user explicitly requested a specific resource.\n // Never propagate pluginType 'marketplace' in this case \u2014 it would trigger the\n // marketplace selection prompt in applyBaseDetection, which is wrong because we\n // already have enough information to install directly.\n const suppressMarketplace = source.resourcePath && pluginDetection.type === 'marketplace';\n \n const packageName = sourcePackage.metadata.name;\n const version = sourcePackage.metadata.version || '0.0.0';\n \n // Note: Plugin transformation is handled by the main flow, not here\n return {\n metadata: sourcePackage.metadata,\n packageName,\n version,\n contentRoot,\n source: 'git',\n pluginMetadata: (pluginDetection.isPlugin && !suppressMarketplace) ? 
{\n isPlugin: true,\n pluginType: pluginDetection.type as any, // Can be 'individual', 'marketplace', or 'marketplace-defined'\n manifestPath: pluginDetection.manifestPath\n } : undefined,\n sourceMetadata: {\n repoPath: result.repoPath,\n commitSha: result.commitSha,\n baseDetection: detectedBaseInfo\n }\n };\n } catch (error) {\n if (error instanceof SourceLoadError) {\n throw error;\n }\n const err = error as Error;\n const ref = source.gitRef ? `#${source.gitRef}` : '';\n const subdir = source.gitPath ? ` (path: ${source.gitPath})` : '';\n const causeMsg = err?.message ? ` - ${err.message}` : '';\n throw new SourceLoadError(\n source,\n `Failed to load package from git: ${source.gitUrl}${ref}${subdir}${causeMsg}`,\n err\n );\n }\n }\n \n}\n", "import { promises as fs } from 'fs';\nimport { dirname, join } from 'path';\nimport * as yaml from 'js-yaml';\nimport { FILE_PATTERNS } from '../constants/index.js';\nimport { getLocalOpenPackageDir } from './paths.js';\nimport { exists, ensureDir, readTextFile } from './fs.js';\nimport { FileSystemError } from './errors.js';\nimport { normalizePathForProcessing } from './path-normalization.js';\nimport { logger } from './logger.js';\nimport { WorkspaceIndex, WorkspaceIndexPackage } from '../types/workspace-index.js';\nimport type { WorkspaceIndexFileMapping } from '../types/workspace-index.js';\n\nconst HEADER_COMMENT = '# This file is managed by OpenPackage. 
Do not edit manually.';\n\nexport interface WorkspaceIndexRecord {\n path: string;\n index: WorkspaceIndex;\n}\n\nexport function getWorkspaceIndexPath(targetDir: string): string {\n return join(getLocalOpenPackageDir(targetDir), FILE_PATTERNS.OPENPACKAGE_INDEX_YML);\n}\n\nfunction sortAndDedupeStrings(values: string[]): string[] {\n return Array.from(new Set(values)).sort();\n}\n\nfunction sortFilesMapping(files: Record<string, any[]>): Record<string, any[]> {\n const sorted: Record<string, any[]> = {};\n const keys = Object.keys(files).sort();\n for (const key of keys) {\n const values = files[key] ?? [];\n // Handle both string[] and (string | WorkspaceIndexFileMapping)[]\n const hasComplex = values.some(v => typeof v === 'object' && v !== null);\n if (hasComplex) {\n // Complex mappings - sort by target path and deduplicate\n const sortedValues = values.sort((a, b) => {\n const targetA = typeof a === 'string' ? a : a.target;\n const targetB = typeof b === 'string' ? b : b.target;\n return targetA.localeCompare(targetB);\n });\n const seen = new Set<string>();\n sorted[key] = sortedValues.filter(item => {\n const target = typeof item === 'string' ? 
item : item.target;\n if (seen.has(target)) return false;\n seen.add(target);\n return true;\n });\n } else {\n // Simple string array\n sorted[key] = sortAndDedupeStrings(values as string[]);\n }\n }\n return sorted;\n}\n\nfunction sanitizeWorkspaceIndexPackage(entry: any): WorkspaceIndexPackage | null {\n if (!entry || typeof entry !== 'object') return null;\n\n const rawPath = (entry as { path?: unknown }).path;\n if (typeof rawPath !== 'string' || rawPath.trim().length === 0) {\n return null;\n }\n\n const pkg: WorkspaceIndexPackage = {\n path: rawPath,\n files: {}\n };\n\n const rawVersion = (entry as { version?: unknown }).version;\n if (typeof rawVersion === 'string' && rawVersion.trim().length > 0) {\n pkg.version = rawVersion;\n }\n\n const rawDeps = (entry as { dependencies?: unknown }).dependencies;\n if (Array.isArray(rawDeps)) {\n const deps = rawDeps.filter((d): d is string => typeof d === 'string' && d.trim().length > 0);\n if (deps.length > 0) {\n pkg.dependencies = sortAndDedupeStrings(deps);\n }\n }\n\n const rawPlatforms = (entry as { platforms?: unknown }).platforms;\n if (Array.isArray(rawPlatforms)) {\n const platforms = rawPlatforms.filter((p): p is string => typeof p === 'string' && p.trim().length > 0);\n if (platforms.length > 0) {\n pkg.platforms = sortAndDedupeStrings(platforms);\n }\n }\n\n const rawNamespace = (entry as { namespace?: unknown }).namespace;\n if (typeof rawNamespace === 'string' && rawNamespace.trim().length > 0) {\n pkg.namespace = rawNamespace.trim();\n }\n\n const rawFiles = (entry as { files?: unknown }).files;\n if (rawFiles && typeof rawFiles === 'object') {\n const files: Record<string, (string | WorkspaceIndexFileMapping)[]> = {};\n for (const [k, v] of Object.entries(rawFiles as Record<string, unknown>)) {\n if (typeof k !== 'string' || !Array.isArray(v)) continue;\n const normalizedKey = normalizePathForProcessing(k);\n const targets: (string | WorkspaceIndexFileMapping)[] = [];\n for (const item of v as 
unknown[]) {\n if (typeof item === 'string') {\n const trimmed = item.trim();\n if (!trimmed) continue;\n targets.push(normalizePathForProcessing(trimmed));\n continue;\n }\n if (item && typeof item === 'object') {\n const rawTarget = (item as any).target;\n if (typeof rawTarget !== 'string' || rawTarget.trim().length === 0) continue;\n const mapping: WorkspaceIndexFileMapping = {\n target: normalizePathForProcessing(rawTarget)\n };\n const rawMerge = (item as any).merge;\n if (rawMerge === 'deep' || rawMerge === 'shallow' || rawMerge === 'replace' || rawMerge === 'composite') {\n mapping.merge = rawMerge;\n }\n const rawKeys = (item as any).keys;\n if (Array.isArray(rawKeys)) {\n const cleanedKeys = rawKeys.filter((x: any) => typeof x === 'string' && x.trim().length > 0);\n if (cleanedKeys.length > 0) {\n mapping.keys = cleanedKeys;\n }\n }\n const rawHash = (item as any).hash;\n if (typeof rawHash === 'string' && rawHash.trim().length > 0) {\n mapping.hash = rawHash;\n }\n const rawSourceHash = (item as any).sourceHash;\n if (typeof rawSourceHash === 'string' && rawSourceHash.trim().length > 0) {\n mapping.sourceHash = rawSourceHash;\n }\n targets.push(mapping);\n }\n }\n if (targets.length === 0) continue;\n files[normalizedKey] = targets;\n }\n pkg.files = sortFilesMapping(files);\n }\n\n // Parse marketplace metadata if present\n const rawMarketplace = (entry as { marketplace?: unknown }).marketplace;\n if (rawMarketplace && typeof rawMarketplace === 'object') {\n const url = (rawMarketplace as any).url;\n const commitSha = (rawMarketplace as any).commitSha;\n const pluginName = (rawMarketplace as any).pluginName;\n \n if (typeof url === 'string' && url.trim().length > 0 &&\n typeof commitSha === 'string' && commitSha.trim().length > 0 &&\n typeof pluginName === 'string' && pluginName.trim().length > 0) {\n pkg.marketplace = { url, commitSha, pluginName };\n }\n }\n\n const rawSourceType = (entry as { sourceType?: unknown }).sourceType;\n if (rawSourceType === 
'project' || rawSourceType === 'global' || rawSourceType === 'registry' || rawSourceType === 'git') {\n pkg.sourceType = rawSourceType;\n }\n\n const rawParent = (entry as { parent?: unknown }).parent;\n if (typeof rawParent === 'string' && rawParent.trim().length > 0) {\n pkg.parent = rawParent.trim();\n }\n\n const rawInstallScope = (entry as { installScope?: unknown }).installScope;\n if (rawInstallScope === 'full' || rawInstallScope === 'subset') {\n pkg.installScope = rawInstallScope;\n }\n\n return pkg;\n}\n\nfunction sanitizeWorkspaceIndexData(data: any): WorkspaceIndex | null {\n if (!data || typeof data !== 'object') return null;\n const packagesSection = (data as { packages?: unknown }).packages;\n if (!packagesSection || typeof packagesSection !== 'object') {\n return { packages: {} };\n }\n\n const packages: Record<string, WorkspaceIndexPackage> = {};\n for (const [pkgName, pkgEntry] of Object.entries(packagesSection as Record<string, unknown>)) {\n if (typeof pkgName !== 'string' || pkgName.trim().length === 0) continue;\n const sanitized = sanitizeWorkspaceIndexPackage(pkgEntry);\n if (sanitized) {\n packages[pkgName] = sanitized;\n }\n }\n\n return { packages };\n}\n\nexport async function readWorkspaceIndex(targetDir: string): Promise<WorkspaceIndexRecord> {\n const indexPath = getWorkspaceIndexPath(targetDir);\n\n if (!(await exists(indexPath))) {\n return {\n path: indexPath,\n index: { packages: {} }\n };\n }\n\n try {\n const content = await readTextFile(indexPath);\n const parsed = yaml.load(content) as any;\n const sanitized = sanitizeWorkspaceIndexData(parsed);\n if (!sanitized) {\n logger.warn(`Invalid workspace index detected at ${indexPath}, returning empty.`);\n return { path: indexPath, index: { packages: {} } };\n }\n \n // Migrate in-memory on read so consumers see a unified key format.\n // (Disk is only updated when writeWorkspaceIndex is called.)\n return { path: indexPath, index: migrateGitHubPackageNames(sanitized) };\n } catch 
(error) {\n logger.warn(`Failed to read workspace index at ${indexPath}: ${error}`);\n return { path: indexPath, index: { packages: {} } };\n }\n}\n\n/**\n * Migrate old GitHub package names to new format.\n * Converts:\n * - @username/repo \u2192 gh@username/repo\n * - @username/repo/path \u2192 gh@username/repo/path\n * \n * Also normalizes package names to use full path from git cache location.\n * For example: gh@user/repo/basename \u2192 gh@user/repo/full/path\n */\nfunction migrateGitHubPackageNames(index: WorkspaceIndex): WorkspaceIndex {\n const migratedPackages: Record<string, WorkspaceIndexPackage> = {};\n \n for (const [pkgName, pkgData] of Object.entries(index.packages)) {\n const normalizedPath = pkgData.path.replace(/\\\\/g, '/');\n \n // Detect if this is a git source by checking:\n // 1. No version field (git sources don't have semver versions)\n // 2. Path contains git cache location marker\n const isGitSource = !pkgData.version;\n const isGitCache = normalizedPath.includes('/.openpackage/cache/git/') || \n normalizedPath.includes('.openpackage/cache/git/');\n \n if (!isGitSource && !isGitCache) {\n // Not a git source, keep as-is\n migratedPackages[pkgName] = pkgData;\n continue;\n }\n \n // Extract the actual path from git cache location\n // Format: .openpackage/cache/git/{url-hash}/{commit-hash}/{optional-subpath}\n const gitCacheMatch = normalizedPath.match(/\\.openpackage\\/cache\\/git\\/[^\\/]+\\/[^\\/]+(?:\\/(.+))?$/);\n const actualSubpath = gitCacheMatch?.[1] || undefined;\n \n // Parse package name to extract username/repo\n let username: string | undefined;\n let repo: string | undefined;\n let nameHasGhPrefix = false;\n let nameSubpathFromName: string | undefined;\n \n // Check for gh@ prefix first\n if (pkgName.startsWith('gh@')) {\n nameHasGhPrefix = true;\n const ghMatch = pkgName.match(/^gh@([^\\/]+)\\/([^\\/]+)(?:\\/(.+))?$/);\n if (ghMatch) {\n username = ghMatch[1];\n repo = ghMatch[2];\n nameSubpathFromName = ghMatch[3] || 
undefined;\n }\n }\n // Check for @ prefix (old format)\n else if (pkgName.startsWith('@')) {\n const atMatch = pkgName.match(/^@([^\\/]+)\\/([^\\/]+)(?:\\/(.+))?$/);\n if (atMatch) {\n username = atMatch[1];\n repo = atMatch[2];\n nameSubpathFromName = atMatch[3] || undefined;\n }\n }\n // Check for no prefix (missing @)\n else {\n const noAtMatch = pkgName.match(/^([^\\/]+)\\/([^\\/]+)(?:\\/(.+))?$/);\n if (noAtMatch) {\n username = noAtMatch[1];\n repo = noAtMatch[2];\n nameSubpathFromName = noAtMatch[3] || undefined;\n }\n }\n \n // If we couldn't extract username/repo, keep original\n if (!username || !repo) {\n migratedPackages[pkgName] = pkgData;\n continue;\n }\n\n // Decide whether to migrate the subpath portion.\n // IMPORTANT: Never \"collapse\" a more-specific resource name to a less-specific path.\n // Example: keep gh@u/r/plugins/x/skills/y even if cache path is plugins/x.\n const nameSubpath = nameSubpathFromName;\n\n let targetSubpath: string | undefined = actualSubpath;\n if (actualSubpath && nameSubpath) {\n const actualNorm = actualSubpath.replace(/\\\\/g, '/');\n const nameNorm = nameSubpath.replace(/\\\\/g, '/');\n\n if (nameNorm === actualNorm) {\n // exact match, ok\n targetSubpath = actualSubpath;\n } else if (actualNorm.endsWith(`/${nameNorm}`)) {\n // Old basename-style name: gh@u/r/basename \u2192 gh@u/r/full/path\n targetSubpath = actualSubpath;\n } else if (nameNorm.startsWith(`${actualNorm}/`)) {\n // Resource-scoped install: keep the more-specific name\n targetSubpath = nameSubpath;\n } else {\n // Unknown relationship: prefer not to rewrite (avoid data loss)\n targetSubpath = nameSubpath;\n }\n } else if (!actualSubpath && nameSubpath) {\n // No cache subpath detected, keep explicit name subpath\n targetSubpath = nameSubpath;\n } else if (actualSubpath && !nameSubpath) {\n // Name has no subpath but cache does \u2192 upgrade to include subpath\n targetSubpath = actualSubpath;\n }\n\n // Build the correct package name (at minimum, 
normalize to gh@ prefix)\n const correctName = targetSubpath\n ? `gh@${username}/${repo}/${targetSubpath}`\n : `gh@${username}/${repo}`;\n \n // Use the correct name (which might be the same as original if already correct)\n migratedPackages[correctName] = pkgData;\n }\n \n return { packages: migratedPackages };\n}\n\nexport async function writeWorkspaceIndex(record: WorkspaceIndexRecord): Promise<void> {\n const indexPath = record.path;\n \n // Migrate package names before writing\n const migrated = migrateGitHubPackageNames(record.index);\n const packages = migrated.packages ?? {};\n\n const sortedPackages: Record<string, WorkspaceIndexPackage> = {};\n for (const pkgName of Object.keys(packages).sort()) {\n const pkg = packages[pkgName];\n const sortedPkg: WorkspaceIndexPackage = {\n path: pkg.path,\n files: sortFilesMapping(pkg.files ?? {})\n };\n if (pkg.version) {\n sortedPkg.version = pkg.version;\n }\n if (pkg.dependencies && pkg.dependencies.length > 0) {\n sortedPkg.dependencies = sortAndDedupeStrings(pkg.dependencies);\n }\n if (pkg.platforms && pkg.platforms.length > 0) {\n sortedPkg.platforms = sortAndDedupeStrings(pkg.platforms);\n }\n if (pkg.namespace) {\n sortedPkg.namespace = pkg.namespace;\n }\n if (pkg.marketplace) {\n sortedPkg.marketplace = pkg.marketplace;\n }\n if (pkg.sourceType) {\n sortedPkg.sourceType = pkg.sourceType;\n }\n if (pkg.parent) {\n sortedPkg.parent = pkg.parent;\n }\n if (pkg.installScope) {\n sortedPkg.installScope = pkg.installScope;\n }\n sortedPackages[pkgName] = sortedPkg;\n }\n\n await ensureDir(dirname(indexPath));\n\n const body = yaml.dump(\n {\n packages: sortedPackages\n },\n {\n lineWidth: 120,\n sortKeys: true\n }\n );\n\n const serialized = `${HEADER_COMMENT}\\n\\n${body}`;\n const tempPath = `${indexPath}.tmp`;\n try {\n await fs.writeFile(tempPath, serialized, 'utf8');\n await fs.rename(tempPath, indexPath);\n } catch (error) {\n try { await fs.unlink(tempPath); } catch { /* ignore cleanup error */ }\n throw 
new FileSystemError(`Failed to write workspace index: ${indexPath}`, { path: indexPath, error });\n }\n}\n", "import { join } from 'path';\nimport type { PackageSourceLoader, LoadedPackage } from './base.js';\nimport type { PackageSource } from '../unified/context.js';\nimport type { InstallOptions, ExecutionContext } from '../../../types/index.js';\nimport { SourceLoadError } from './base.js';\nimport { readWorkspaceIndex } from '../../../utils/workspace-index-yml.js';\nimport { resolveDeclaredPath } from '../../../utils/path-resolution.js';\nimport { loadPackageFromPath } from '../path-package-loader.js';\n\n/**\n * Loads packages from workspace index (for apply command)\n */\nexport class WorkspaceSourceLoader implements PackageSourceLoader {\n canHandle(source: PackageSource): boolean {\n return source.type === 'workspace';\n }\n \n async load(\n source: PackageSource,\n options: InstallOptions,\n execContext: ExecutionContext\n ): Promise<LoadedPackage> {\n if (!source.packageName) {\n throw new SourceLoadError(source, 'Package name is required for workspace sources');\n }\n \n try {\n // Check if contentRoot is already set (workspace root install case)\n if (source.contentRoot) {\n const pkg = await loadPackageFromPath(source.contentRoot, {\n packageName: source.packageName\n });\n const metadata = pkg.metadata;\n const version = source.version || metadata.version || '0.0.0';\n \n return {\n metadata,\n packageName: source.packageName,\n version,\n contentRoot: source.contentRoot,\n source: 'workspace',\n pluginMetadata: (pkg as any)._format ? 
{\n isPlugin: true,\n pluginType: 'individual',\n format: (pkg as any)._format\n } : undefined\n };\n }\n \n // Standard workspace source loading (from index)\n // Read workspace index (use targetDir for workspace location)\n const { index } = await readWorkspaceIndex(execContext.targetDir);\n const entry = index.packages?.[source.packageName];\n \n if (!entry?.path) {\n throw new SourceLoadError(\n source,\n `Package '${source.packageName}' is not installed in this workspace. ` +\n `Run 'opkg install ${source.packageName}' to install it first.`\n );\n }\n \n // Resolve package path (relative to targetDir)\n const resolved = resolveDeclaredPath(entry.path, execContext.targetDir);\n const contentRoot = resolved.absolute;\n \n // Load package metadata (handles regular packages and plugins)\n const pkg = await loadPackageFromPath(contentRoot, {\n packageName: source.packageName,\n gitUrl: source.gitUrl,\n path: source.gitPath\n });\n const metadata = pkg.metadata;\n \n const version = entry.version || metadata.version || '0.0.0';\n \n return {\n metadata,\n packageName: source.packageName,\n version,\n contentRoot,\n source: 'workspace',\n pluginMetadata: (pkg as any)._format ? 
{\n isPlugin: true,\n pluginType: 'individual',\n format: (pkg as any)._format\n } : undefined\n };\n } catch (error) {\n if (error instanceof SourceLoadError) {\n throw error;\n }\n \n throw new SourceLoadError(\n source,\n `Failed to load package '${source.packageName}' from workspace`,\n error as Error\n );\n }\n }\n \n}\n", "import type { PackageSourceLoader } from './base.js';\nimport type { PackageSource } from '../unified/context.js';\nimport { RegistrySourceLoader } from './registry-source.js';\nimport { PathSourceLoader } from './path-source.js';\nimport { GitSourceLoader } from './git-source.js';\nimport { WorkspaceSourceLoader } from './workspace-source.js';\n\n/**\n * Registry of all available source loaders\n */\nconst loaders: PackageSourceLoader[] = [\n new RegistrySourceLoader(),\n new PathSourceLoader(),\n new GitSourceLoader(),\n new WorkspaceSourceLoader()\n];\n\n/**\n * Get appropriate loader for a source\n */\nexport function getLoaderForSource(source: PackageSource): PackageSourceLoader {\n const loader = loaders.find(l => l.canHandle(source));\n \n if (!loader) {\n throw new Error(`No loader available for source type: ${source.type}`);\n }\n \n return loader;\n}\n", "import type { InstallationContext } from './context.js';\nimport { resolveOutput } from '../../ports/resolve.js';\n\n/**\n * Check if context should update manifest\n */\nexport function shouldUpdateManifest(ctx: InstallationContext): boolean {\n return (\n ctx.mode !== 'apply' &&\n ctx.source.type !== 'workspace' &&\n ctx.options.skipManifestUpdate !== true\n );\n}\n\n/**\n * Add warning to context\n */\nexport function addWarning(ctx: InstallationContext, message: string): void {\n if (!ctx.warnings.includes(message)) {\n ctx.warnings.push(message);\n const out = resolveOutput(ctx.execution);\n out.warn(message);\n }\n}\n\n/**\n * Add error to context\n */\nexport function addError(ctx: InstallationContext, message: string): void {\n if (!ctx.errors.includes(message)) {\n 
ctx.errors.push(message);\n }\n}\n\n/**\n * Get display name for source\n */\nexport function getSourceDisplayName(ctx: InstallationContext): string {\n const { source } = ctx;\n \n switch (source.type) {\n case 'registry':\n return source.version\n ? `${source.packageName}@${source.version}`\n : source.packageName;\n \n case 'path':\n // For marketplace plugins loaded from cache, show the plugin name\n // instead of exposing the internal cache path\n if (source.pluginMetadata?.marketplaceSource || source.pluginMetadata?.marketplaceEntry) {\n const entryName = source.pluginMetadata.marketplaceEntry?.name;\n return source.packageName || entryName || 'plugin';\n }\n return `${source.packageName} (from ${source.localPath})`;\n \n case 'git':\n const ref = source.gitRef ? `#${source.gitRef}` : '';\n const subdir = source.gitPath ? `&path=${source.gitPath}` : '';\n return `${source.packageName} (git:${source.gitUrl}${ref}${subdir})`;\n \n case 'workspace':\n return `${source.packageName} (workspace)`;\n \n default:\n return source.packageName;\n }\n}\n", "import type { InstallationContext } from '../unified/context.js';\nimport type { LoadedPackage } from '../sources/base.js';\nimport { join, relative } from 'path';\nimport { stat } from 'fs/promises';\nimport { logger } from '../../../utils/logger.js';\nimport { ValidationError } from '../../../utils/errors.js';\n\nexport interface ResourceScopingResult {\n /** Relative path from the base to the resource. Empty string means resource IS the base. */\n relPath: string;\n /** Whether the resource path is a directory on disk. */\n isDirectory: boolean;\n /** The computed glob pattern for file matching (e.g. \"**\", \"dir/**\", or \"file.ts\"). 
*/\n pattern: string;\n}\n\nexport interface ResolveResourceScopingOptions {\n /**\n * When true, throws ValidationError if the resource path does not exist on disk.\n * When false (default), a missing path is treated as a file (isDirectory = false).\n */\n strict?: boolean;\n}\n\n/**\n * Core computation: resolve a resource path relative to a package base and produce a match pattern.\n *\n * This is the single source of truth for the path-math shared by both\n * `computePathScoping()` (direct installs) and the installation planner (recursive installs).\n *\n * @returns A ResourceScopingResult, or `null` if the resource path is outside the base.\n */\nexport async function resolveResourceScoping(\n repoRoot: string,\n baseAbs: string,\n resourcePath: string,\n options?: ResolveResourceScopingOptions\n): Promise<ResourceScopingResult | null> {\n const absResourcePath = join(repoRoot, resourcePath);\n const rawRel = relative(baseAbs, absResourcePath).replace(/\\\\/g, '/');\n\n // Resource is outside the detected base \u2014 caller decides how to handle.\n if (rawRel.startsWith('..')) {\n return null;\n }\n\n // Strip cosmetic \"./\" prefix (when resource is at the same level as base).\n const relToBaseRaw = rawRel.replace(/^\\.\\/?/, '');\n\n let isDirectory = false;\n try {\n const s = await stat(absResourcePath);\n isDirectory = s.isDirectory();\n } catch {\n if (options?.strict) {\n throw new ValidationError(\n `The specified resource path does not exist in the repository: ${resourcePath}\\n\\n` +\n `Please verify the path. The file or directory may have been moved, or you may have meant a different path.`\n );\n }\n // Non-strict: best-effort, default to file\n }\n\n // When relToBaseRaw is \"\" the resource IS the base directory \u2192 pattern \"**\"\n const prefix = relToBaseRaw.replace(/\\/$/, '');\n const pattern = isDirectory ? (prefix ? 
`${prefix}/**` : '**') : relToBaseRaw;\n\n return { relPath: relToBaseRaw, isDirectory, pattern };\n}\n\nexport type SpecialHandling = 'marketplace' | 'ambiguous';\n\nexport interface AmbiguousBaseMatch {\n pattern: string;\n base: string;\n startIndex: number;\n}\n\nexport interface ApplyBaseDetectionResult {\n specialHandling?: SpecialHandling;\n ambiguousMatches?: AmbiguousBaseMatch[];\n}\n\n/**\n * Apply base detection results from a loader into an InstallationContext.\n *\n * This centralizes logic that used to be duplicated in strategies and the\n * pipeline load phase.\n * \n * Includes state tracking to prevent redundant application when called\n * multiple times (e.g., in strategy preprocessing and load phase).\n */\nexport function applyBaseDetection(\n ctx: InstallationContext,\n loaded: LoadedPackage\n): ApplyBaseDetectionResult {\n const baseDetection: any = loaded.sourceMetadata?.baseDetection;\n if (!baseDetection) {\n return {};\n }\n\n // Check if base detection has already been applied\n if (ctx.source._baseDetectionPerformed) {\n logger.debug('Base detection already applied, skipping redundant application');\n return {};\n }\n\n // If loaders already detected marketplace, surface it for orchestrator routing.\n if (loaded.pluginMetadata?.pluginType === 'marketplace') {\n ctx.source._baseDetectionPerformed = true;\n return { specialHandling: 'marketplace' };\n }\n\n if (baseDetection?.base) {\n ctx.detectedBase = baseDetection.base;\n // Keep source.detectedBase in sync\n ctx.source.detectedBase = baseDetection.base;\n }\n\n if (baseDetection?.matchedPattern && !ctx.matchedPattern) {\n ctx.matchedPattern = baseDetection.matchedPattern;\n }\n\n if (baseDetection?.matchType) {\n ctx.baseSource = baseDetection.matchType as any;\n }\n\n // Keep source contentRoot aligned with detected base (resource model).\n const effectiveContentRoot = ctx.detectedBase || loaded.contentRoot;\n if (effectiveContentRoot) {\n ctx.source.contentRoot = 
effectiveContentRoot;\n }\n\n // Compute baseRelative when repoRoot is available.\n const repoRoot = loaded.sourceMetadata?.repoPath;\n if (!ctx.baseRelative && repoRoot && ctx.detectedBase) {\n ctx.baseRelative = relative(repoRoot, ctx.detectedBase) || '.';\n } else if (!ctx.baseRelative && loaded.contentRoot && ctx.detectedBase) {\n // Fallback (less ideal if contentRoot already equals detectedBase).\n ctx.baseRelative = relative(loaded.contentRoot, ctx.detectedBase) || '.';\n }\n\n // Mark as performed\n ctx.source._baseDetectionPerformed = true;\n\n if (baseDetection?.matchType === 'marketplace') {\n return { specialHandling: 'marketplace' };\n }\n\n if (baseDetection?.matchType === 'ambiguous' && Array.isArray(baseDetection.ambiguousMatches)) {\n return { specialHandling: 'ambiguous', ambiguousMatches: baseDetection.ambiguousMatches };\n }\n\n return {};\n}\n\n/**\n * Compute resource path scoping for installs targeting a concrete resource.\n *\n * Updates ctx.matchedPattern to scope the install to the specified resourcePath.\n * \n * Includes state tracking to prevent redundant computation when called\n * multiple times (e.g., in strategy preprocessing and load phase).\n *\n * Rules:\n * - If resource resolves to a directory, pattern becomes `<dir>/**`\n * - If resource resolves to a file, pattern becomes `<file>`\n * - If the resource cannot be stat'ed or is outside the detected base, throws ValidationError.\n */\nexport async function computePathScoping(\n ctx: InstallationContext,\n loaded: LoadedPackage,\n resourcePath: string\n): Promise<void> {\n // Check if path scoping has already been computed\n if (ctx._pathScopingPerformed) {\n logger.debug('Path scoping already computed, skipping redundant computation');\n return;\n }\n\n const repoRoot = loaded.sourceMetadata?.repoPath || loaded.contentRoot;\n if (!repoRoot) {\n return;\n }\n\n const baseAbs = ctx.detectedBase || loaded.contentRoot;\n if (!baseAbs) {\n return;\n }\n\n const result = await 
resolveResourceScoping(repoRoot, baseAbs, resourcePath, { strict: true });\n if (!result) {\n // Path is outside the detected package base - invalid for single-file install\n throw new ValidationError(\n `The specified resource path is outside the package base: ${resourcePath}\\n\\n` +\n `Please verify the path is within the package you are installing.`\n );\n }\n\n ctx.matchedPattern = result.pattern;\n\n // Set installScope based on the scoping result:\n // If the resource IS the base directory (pattern \"**\"), it's a full install.\n // Otherwise it's a subset install targeting a specific file or subdirectory.\n ctx.installScope = result.pattern === '**' ? 'full' : 'subset';\n\n // Mark as performed\n ctx._pathScopingPerformed = true;\n}\n", "/**\n * Load Package Phase\n * Loads package from source using appropriate loader\n */\n\nimport type { InstallationContext } from '../context.js';\nimport { getLoaderForSource } from '../../sources/loader-factory.js';\nimport { addError, getSourceDisplayName } from '../context-helpers.js';\nimport { logger } from '../../../../utils/logger.js';\nimport type { OutputPort, UnifiedSpinner } from '../../../ports/output.js';\nimport { resolveOutput } from '../../../ports/resolve.js';\nimport { applyBaseDetection, computePathScoping } from '../../preprocessing/base-resolver.js';\n\n/**\n * Load package from source\n */\nexport async function loadPackagePhase(ctx: InstallationContext, output?: OutputPort, externalSpinner?: UnifiedSpinner): Promise<void> {\n // Skip if context already has loaded data (preprocessed by strategy)\n // NOTE: We require resolvedPackages to be populated too; otherwise later phases break.\n if (ctx.source.contentRoot && ctx.source.packageName && ctx.resolvedPackages.length > 0) {\n return;\n }\n\n const out = output ?? 
resolveOutput(ctx.execution);\n // When an external spinner is provided (from the pipeline), reuse it \u2014\n // only update messages, don't start/stop (the caller owns the lifecycle).\n const spinner = externalSpinner ?? out.spinner();\n const isOwned = !externalSpinner;\n\n try {\n // Get appropriate loader\n const loader = getLoaderForSource(ctx.source);\n\n // Display loading message with spinner\n const displayName = getSourceDisplayName(ctx);\n const spinnerMsg = `Loading ${displayName}`;\n if (isOwned) {\n spinner.start(spinnerMsg);\n } else {\n spinner.message(spinnerMsg);\n }\n\n // Load package (pass spinner so inner operations can update its message)\n const loaded = await loader.load(ctx.source, ctx.options, ctx.execution, spinner);\n\n // Stop spinner silently when we own it; the report phase will display the\n // \"Installed <name>@<version>\" header so output is consistent.\n // When using an external spinner, the caller manages the lifecycle.\n if (isOwned) {\n spinner.stop();\n }\n\n // Update context\n ctx.source.packageName = loaded.packageName;\n ctx.source.version = loaded.version;\n \n // Apply version fallback chain for resource installations (agents/skills)\n // Priority: resourceVersion (from frontmatter) > metadata.version > parent version > undefined\n if (ctx.source.resourceVersion !== undefined) {\n // Resource has explicit version from frontmatter, use it as final version\n ctx.source.version = ctx.source.resourceVersion;\n } else if (loaded.metadata?.version && loaded.metadata.version !== loaded.version) {\n // Metadata has different version than loader provided (e.g., openpackage.yml in resource dir)\n ctx.source.version = loaded.metadata.version;\n }\n // Otherwise, keep loaded.version (parent package/plugin version)\n \n // Apply base detection results from loader (resource model).\n // Bulk installs previously missed this, causing unscoped installs and incorrect workspace-index paths.\n applyBaseDetection(ctx, loaded);\n\n // 
Ensure contentRoot is always set after load phase\n // This is required by the pipeline validation and may not be set if base detection was skipped\n if (!ctx.source.contentRoot) {\n ctx.source.contentRoot = loaded.contentRoot;\n }\n\n // If this install targets a concrete resource (file or dir), scope matchedPattern to that resource.\n // This matches the behavior of individual resource installs.\n const resourcePath = (ctx.source as any).resourcePath as string | undefined;\n if (resourcePath) {\n await computePathScoping(ctx, loaded, resourcePath);\n }\n \n ctx.source.pluginMetadata = loaded.pluginMetadata;\n \n // Store commit SHA for git sources (needed for workspace index marketplace metadata)\n if (loaded.sourceMetadata?.commitSha) {\n if (!ctx.source.pluginMetadata) {\n ctx.source.pluginMetadata = { isPlugin: false };\n }\n if (!ctx.source.pluginMetadata.marketplaceSource && loaded.sourceMetadata.commitSha) {\n // Store commit SHA for potential marketplace source tracking\n (ctx.source as any)._commitSha = loaded.sourceMetadata.commitSha;\n }\n }\n \n // Map source type to ResolvedPackage source format\n let resolvedSource: 'local' | 'remote' | 'path' | 'git' | undefined;\n switch (ctx.source.type) {\n case 'registry':\n resolvedSource = 'local'; // Registry packages are local\n break;\n case 'path':\n resolvedSource = 'path';\n break;\n case 'git':\n resolvedSource = 'git';\n break;\n case 'workspace':\n resolvedSource = 'local'; // Workspace packages are local\n break;\n }\n \n // Create root resolved package (simplified - full dependency resolution in next phase)\n // Use the effective version from context (which has fallback chain applied)\n const effectiveVersion = ctx.source.version || loaded.version;\n\n const rootPackage: any = {\n name: loaded.packageName,\n version: effectiveVersion,\n pkg: { \n metadata: loaded.metadata, \n files: [], \n _format: (loaded.metadata as any)?._format || ctx.source.pluginMetadata?.format \n },\n isRoot: true,\n source: 
resolvedSource,\n contentRoot: ctx.source.contentRoot || loaded.contentRoot // Use detected base as content root\n };\n \n // Add marketplace metadata if present\n if (ctx.source.pluginMetadata?.marketplaceSource) {\n rootPackage.marketplaceMetadata = ctx.source.pluginMetadata.marketplaceSource;\n }\n \n // Add resource version if present (for agents/skills with individual versions)\n if (ctx.source.resourceVersion !== undefined) {\n rootPackage.resourceVersion = ctx.source.resourceVersion;\n }\n \n ctx.resolvedPackages = [rootPackage];\n \n logger.info(`Loaded ${loaded.packageName}@${effectiveVersion} from ${loaded.source}`);\n \n } catch (error) {\n if (isOwned) {\n spinner.stop();\n }\n const errorMsg = `Failed to load package: ${error}`;\n addError(ctx, errorMsg);\n throw new Error(errorMsg);\n }\n}\n", "/**\n * Import Flow Converter Module\n * \n * Converts files using import flows from platforms.jsonc.\n * Applies platform-specific \u2192 universal format transformations.\n * \n * Phase 3: Per-File Import Flow Application\n */\n\nimport { minimatch } from 'minimatch';\nimport { logger } from '../../utils/logger.js';\nimport { getPlatformDefinition } from '../platforms.js';\nimport { getPatternFromFlow, schemaRegistry } from './schema-registry.js';\nimport { applyMapPipeline, createMapContext } from '../flows/map-pipeline/index.js';\nimport { defaultTransformRegistry } from '../flows/flow-transforms.js';\nimport { splitFrontmatter, dumpYaml } from '../markdown-frontmatter.js';\nimport { basename, dirname, extname } from 'path';\nimport { stripPlatformSuffixFromFilename } from '../flows/platform-suffix-handler.js';\nimport { resolveRecursiveGlobTargetRelativePath } from '../glob-target-mapping.js';\nimport { scoreAgainstSchema } from './file-format-detector.js';\nimport type { Flow } from '../../types/flows.js';\nimport type { \n PackageFile,\n PlatformId,\n FileFormat,\n FormatGroup\n} from './detection-types.js';\n\n/**\n * Conversion result for a single 
file\n */\nexport interface FileConversionResult {\n /** Original file */\n original: PackageFile;\n \n /** Converted file (in universal format) */\n converted?: PackageFile;\n \n /** Whether conversion succeeded */\n success: boolean;\n \n /** Error if conversion failed */\n error?: Error;\n \n /** Flow that was applied */\n appliedFlow?: Flow;\n \n /** Whether file needed transformation */\n transformed: boolean;\n}\n\n/**\n * Conversion result for a format group\n */\nexport interface FormatGroupConversionResult {\n /** Platform ID of this group */\n platformId: PlatformId;\n \n /** Converted files (in universal format) */\n convertedFiles: PackageFile[];\n \n /** Per-file conversion results */\n fileResults: FileConversionResult[];\n \n /** Overall success (all files converted) */\n success: boolean;\n \n /** Number of files processed */\n filesProcessed: number;\n \n /** Number of files successfully converted */\n filesConverted: number;\n \n /** Number of files that failed */\n filesFailed: number;\n}\n\n/**\n * Convert a format group using import flows\n * \n * Loads import flows for the platform and applies them to each file in the group.\n * Returns converted files in universal format.\n * \n * @param group - Format group to convert\n * @param targetDir - Optional target directory for local platform config\n * @returns Conversion result with converted files\n */\nexport function convertFormatGroup(\n group: FormatGroup,\n targetDir?: string\n): FormatGroupConversionResult {\n // Skip conversion for universal format (already in target format)\n if (group.platformId === 'universal') {\n return {\n platformId: group.platformId,\n convertedFiles: group.files,\n fileResults: group.files.map(file => ({\n original: file,\n converted: file,\n success: true,\n transformed: false\n })),\n success: true,\n filesProcessed: group.files.length,\n filesConverted: group.files.length,\n filesFailed: 0\n };\n }\n \n // Skip conversion for unknown format (no flows 
available)\n if (group.platformId === 'unknown') {\n logger.warn('Group has unknown format, cannot convert');\n return {\n platformId: group.platformId,\n convertedFiles: [],\n fileResults: group.files.map(file => ({\n original: file,\n success: false,\n error: new Error('Unknown format - no conversion flows available'),\n transformed: false\n })),\n success: false,\n filesProcessed: group.files.length,\n filesConverted: 0,\n filesFailed: group.files.length\n };\n }\n \n // Load import flows for platform\n const platform = getPlatformDefinition(group.platformId, targetDir);\n if (!platform) {\n logger.error(`Platform definition not found: ${group.platformId}`);\n return {\n platformId: group.platformId,\n convertedFiles: [],\n fileResults: group.files.map(file => ({\n original: file,\n success: false,\n error: new Error(`Platform definition not found: ${group.platformId}`),\n transformed: false\n })),\n success: false,\n filesProcessed: group.files.length,\n filesConverted: 0,\n filesFailed: group.files.length\n };\n }\n \n const importFlows = platform.import || [];\n if (importFlows.length === 0) {\n logger.warn(`No import flows defined for platform: ${group.platformId}`);\n // Return files unchanged if no flows (treat as already universal)\n return {\n platformId: group.platformId,\n convertedFiles: group.files,\n fileResults: group.files.map(file => ({\n original: file,\n converted: file,\n success: true,\n transformed: false\n })),\n success: true,\n filesProcessed: group.files.length,\n filesConverted: group.files.length,\n filesFailed: 0\n };\n }\n \n // Apply import flows to each file\n const fileResults: FileConversionResult[] = [];\n const convertedFiles: PackageFile[] = [];\n \n for (const file of group.files) {\n const result = convertSingleFile(file, importFlows, group.platformId);\n fileResults.push(result);\n \n if (result.success && result.converted) {\n convertedFiles.push(result.converted);\n }\n }\n \n const filesConverted = fileResults.filter(r 
=> r.success).length;\n const filesFailed = fileResults.filter(r => !r.success).length;\n \n return {\n platformId: group.platformId,\n convertedFiles,\n fileResults,\n success: filesFailed === 0,\n filesProcessed: group.files.length,\n filesConverted,\n filesFailed\n };\n}\n\n/**\n * Convert a single file using import flows\n * \n * Matches file against flows, applies transformations, converts to universal format.\n * \n * @param file - File to convert\n * @param flows - Import flows to apply\n * @param platformId - Source platform ID\n * @returns Conversion result\n */\nexport function convertSingleFile(\n file: PackageFile,\n flows: Flow[],\n platformId: PlatformId\n): FileConversionResult {\n // Find matching flow for this file\n const matchedFlow = findMatchingFlow(file.path, flows);\n \n if (!matchedFlow) {\n // Fallback: schema-based flow match\n // This handles cases where platform-formatted content exists at a universal path\n // (e.g. Claude-formatted agent in `agents/` rather than `.claude/agents/`).\n const schemaFlow = findBestSchemaMatchingFlow(file, flows, platformId);\n if (schemaFlow) {\n try {\n const converted = applyFlowToFile(file, schemaFlow, platformId);\n return {\n original: file,\n converted,\n success: true,\n appliedFlow: schemaFlow,\n transformed: true\n };\n } catch (error) {\n logger.error(`Failed to convert file (schema fallback): ${file.path}`, error);\n return {\n original: file,\n success: false,\n error: error instanceof Error ? 
error : new Error(String(error)),\n appliedFlow: schemaFlow,\n transformed: false\n };\n }\n }\n\n // No flow matched - return file unchanged (assume already universal)\n return {\n original: file,\n converted: file,\n success: true,\n transformed: false\n };\n }\n \n try {\n // Apply flow transformation\n const converted = applyFlowToFile(file, matchedFlow, platformId);\n \n return {\n original: file,\n converted,\n success: true,\n appliedFlow: matchedFlow,\n transformed: true\n };\n } catch (error) {\n logger.error(`Failed to convert file: ${file.path}`, error);\n return {\n original: file,\n success: false,\n error: error instanceof Error ? error : new Error(String(error)),\n appliedFlow: matchedFlow,\n transformed: false\n };\n }\n}\n\n/**\n * Find matching flow for a file path\n * \n * Matches file path against flow 'from' patterns using glob matching.\n * Returns first matching flow.\n * \n * @param filePath - File path to match\n * @param flows - Array of flows to check\n * @returns Matching flow or null\n */\nfunction findMatchingFlow(filePath: string, flows: Flow[]): Flow | null {\n for (const flow of flows) {\n // Skip fallback flows - they're catch-alls for unmatched files\n // and should not be applied to files that are already in universal format\n if (flow.fallback) {\n continue;\n }\n\n const pattern = getPatternFromFlow(flow, 'from');\n \n if (!pattern) {\n continue;\n }\n \n // Check if file path matches flow pattern\n if (matchGlob(filePath, pattern)) {\n return flow;\n }\n }\n \n return null;\n}\n\n/**\n * Fallback: Find the best schema-matching flow for a file.\n *\n * Uses the flow's `from` schema (if present) to score against frontmatter.\n * Only considered when glob/path matching fails.\n */\nfunction findBestSchemaMatchingFlow(\n file: PackageFile,\n flows: Flow[],\n platformId: PlatformId\n): Flow | null {\n // Parse frontmatter (if needed)\n let frontmatter = file.frontmatter;\n if (!frontmatter && file.content) {\n const parsed = 
splitFrontmatter(file.content);\n frontmatter = parsed.frontmatter || {};\n }\n\n if (!frontmatter || Object.keys(frontmatter).length === 0) {\n return null;\n }\n\n let best: { flow: Flow; score: number } | null = null;\n\n for (const flow of flows) {\n const schema = schemaRegistry.getSchemaForFlow(flow, 'from');\n if (!schema) continue;\n\n const match = scoreAgainstSchema(frontmatter, schema, flow, file.path, platformId);\n\n // Ignore extremely weak matches to reduce accidental conversions\n if (match.score <= 0.2) continue;\n\n if (!best || match.score > best.score) {\n best = { flow, score: match.score };\n }\n }\n\n return best?.flow ?? null;\n}\n\n/**\n * Match file path against glob pattern\n */\nfunction matchGlob(filePath: string, pattern: string): boolean {\n try {\n return minimatch(filePath, pattern);\n } catch (error) {\n logger.warn(`Invalid glob pattern: ${pattern}`, error);\n return false;\n }\n}\n\n/**\n * Apply flow transformation to a file\n * \n * Transforms file content using flow's map operations.\n * Handles frontmatter transformation and path transformation.\n * \n * @param file - File to transform\n * @param flow - Flow to apply\n * @param platformId - Source platform ID\n * @returns Transformed file\n */\nfunction applyFlowToFile(\n file: PackageFile,\n flow: Flow,\n platformId: PlatformId\n): PackageFile {\n // Parse frontmatter if not already parsed\n let frontmatter = file.frontmatter;\n let body = '';\n \n if (!frontmatter && file.content) {\n const parsed = splitFrontmatter(file.content);\n frontmatter = parsed.frontmatter;\n body = parsed.body;\n }\n \n // Transform frontmatter using map operations\n let transformedFrontmatter = frontmatter;\n \n if (flow.map && flow.map.length > 0 && frontmatter) {\n // Create map context\n const mapContext = createMapContext({\n filename: basename(file.path),\n dirname: dirname(file.path),\n path: file.path,\n ext: extname(file.path)\n });\n \n // Apply map pipeline\n transformedFrontmatter = 
applyMapPipeline(\n frontmatter,\n flow.map,\n mapContext,\n defaultTransformRegistry\n );\n }\n \n // Transform path using flow patterns\n let transformedPath = transformPath(file.path, flow);\n // Strip platform suffix from output path (e.g. agents/foo.opencode.md -> agents/foo.md)\n const stripped = stripPlatformSuffixFromFilename(transformedPath);\n if (stripped !== transformedPath) {\n transformedPath = stripped;\n }\n \n // Serialize frontmatter back to content\n let transformedContent = file.content;\n if (transformedFrontmatter && body !== undefined) {\n const serialized = dumpYaml(transformedFrontmatter);\n const yamlBlock = serialized.endsWith('\\n') ? serialized : `${serialized}\\n`;\n transformedContent = `---\\n${yamlBlock}---\\n${body}`;\n }\n \n return {\n path: transformedPath,\n content: transformedContent,\n frontmatter: transformedFrontmatter\n };\n}\n\n/**\n * Transform file path using flow patterns\n * \n * Converts platform-specific path to universal path.\n * Example: .claude/agents/agent.md \u2192 agents/agent.md\n * \n * @param filePath - Original file path\n * @param flow - Flow with from/to patterns\n * @returns Transformed path\n */\nfunction transformPath(filePath: string, flow: Flow): string {\n const fromPattern = getPatternFromFlow(flow, 'from');\n const toPattern = getPatternFromFlow(flow, 'to');\n \n if (!fromPattern || !toPattern) {\n return filePath;\n }\n\n if (!matchGlob(filePath, fromPattern)) {\n return filePath;\n }\n\n if (toPattern.includes('**')) {\n return resolveRecursiveGlobTargetRelativePath(filePath, fromPattern, toPattern);\n }\n\n if (toPattern.includes('*')) {\n const sourceExt = extname(filePath);\n const sourceBase = basename(filePath, sourceExt);\n const toParts = toPattern.split('*');\n const toPrefix = toParts[0];\n const toSuffix = toParts[1] || '';\n const targetExt = toSuffix.startsWith('.') ? 
toSuffix : (sourceExt + toSuffix);\n return toPrefix + sourceBase + targetExt;\n }\n\n return toPattern;\n}\n\n/**\n * Validate that converted file is in universal format\n * \n * Checks that file has been transformed correctly.\n * \n * @param file - Converted file to validate\n * @returns Whether file is valid universal format\n */\nexport function validateUniversalFormat(file: PackageFile): boolean {\n // Parse frontmatter\n let frontmatter = file.frontmatter;\n \n if (!frontmatter && file.content) {\n const parsed = splitFrontmatter(file.content);\n frontmatter = parsed.frontmatter;\n }\n \n if (!frontmatter) {\n // No frontmatter - technically valid (e.g., skills)\n return true;\n }\n \n // Check for universal format indicators\n // Universal format uses:\n // - tools: array format (not string or object)\n // - model: prefixed format (e.g., \"anthropic/claude-3-5-sonnet-20241022\")\n // - permissions: object format (not permissionMode string)\n \n // Check tools field if present\n if ('tools' in frontmatter) {\n const tools = frontmatter.tools;\n \n // Universal format uses array\n if (!Array.isArray(tools)) {\n return false;\n }\n }\n \n // Check for platform-specific exclusive fields\n // These should not be present in universal format\n const platformExclusiveFields = [\n 'permissionMode', // Claude exclusive\n 'hooks', // Claude exclusive\n 'skills', // Claude exclusive\n 'temperature', // OpenCode exclusive\n 'maxSteps', // OpenCode exclusive\n 'disabled' // OpenCode exclusive\n ];\n \n for (const field of platformExclusiveFields) {\n if (field in frontmatter) {\n return false;\n }\n }\n \n return true;\n}\n\n/**\n * Apply import flows to array of files\n * \n * Convenience function that applies flows to multiple files.\n * \n * @param files - Files to convert\n * @param flows - Import flows to apply\n * @param platformId - Source platform ID\n * @returns Array of converted files\n */\nexport function applyImportFlows(\n files: PackageFile[],\n flows: 
Flow[],\n platformId: PlatformId\n): PackageFile[] {\n const convertedFiles: PackageFile[] = [];\n \n for (const file of files) {\n const result = convertSingleFile(file, flows, platformId);\n \n if (result.success && result.converted) {\n convertedFiles.push(result.converted);\n } else {\n logger.warn(`Failed to convert file: ${file.path}`, {\n error: result.error?.message\n });\n // Include original file if conversion failed\n convertedFiles.push(file);\n }\n }\n \n return convertedFiles;\n}\n", "/**\n * Map Pipeline Utilities\n * \n * Shared utilities for map operations:\n * - Nested value access (get/set/delete)\n * - Pattern matching (glob and object shapes)\n * - Wildcard handling\n */\n\nimport { minimatch } from 'minimatch';\n\n/**\n * Get nested value using dot notation\n * \n * Examples:\n * - getNestedValue({ a: { b: 1 } }, \"a.b\") \u2192 1\n * - getNestedValue({ a: { b: 1 } }, \"x\") \u2192 undefined\n */\nexport function getNestedValue(obj: any, path: string): any {\n if (!path) {\n return obj;\n }\n\n const keys = path.split('.');\n let current = obj;\n\n for (const key of keys) {\n if (current && typeof current === 'object' && key in current) {\n current = current[key];\n } else {\n return undefined;\n }\n }\n\n return current;\n}\n\n/**\n * Set nested value using dot notation\n * Creates intermediate objects as needed\n * \n * Examples:\n * - setNestedValue({}, \"a.b\", 1) \u2192 { a: { b: 1 } }\n * - setNestedValue({ a: { c: 2 } }, \"a.b\", 1) \u2192 { a: { c: 2, b: 1 } }\n */\nexport function setNestedValue(obj: any, path: string, value: any): void {\n if (!path) {\n return;\n }\n\n const keys = path.split('.');\n let current = obj;\n\n // Navigate/create nested structure\n for (let i = 0; i < keys.length - 1; i++) {\n const key = keys[i];\n \n if (!(key in current) || typeof current[key] !== 'object' || current[key] === null) {\n current[key] = {};\n }\n \n current = current[key];\n }\n\n // Set final value\n const finalKey = keys[keys.length - 
1];\n current[finalKey] = value;\n}\n\n/**\n * Delete nested value using dot notation\n * \n * Examples:\n * - deleteNestedValue({ a: { b: 1 } }, \"a.b\") \u2192 { a: {} }\n */\nexport function deleteNestedValue(obj: any, path: string): void {\n if (!path) {\n return;\n }\n\n const keys = path.split('.');\n let current = obj;\n\n // Navigate to parent\n for (let i = 0; i < keys.length - 1; i++) {\n const key = keys[i];\n \n if (!(key in current) || typeof current[key] !== 'object') {\n return; // Path doesn't exist\n }\n \n current = current[key];\n }\n\n // Delete final key\n const finalKey = keys[keys.length - 1];\n delete current[finalKey];\n}\n\n/**\n * Match value against pattern\n * Supports:\n * - String patterns with glob syntax (*, ?)\n * - Object shape matching\n * - Wildcard * to match any value\n * \n * Examples:\n * - matchPattern(\"anthropic/claude-sonnet-4\", \"anthropic/claude-sonnet-*\") \u2192 true\n * - matchPattern({ edit: \"deny\" }, { edit: \"deny\", bash: \"deny\" }) \u2192 false\n * - matchPattern({ edit: \"deny\", bash: \"deny\" }, { edit: \"deny\", bash: \"deny\" }) \u2192 true\n * - matchPattern(\"anything\", \"*\") \u2192 true\n */\nexport function matchPattern(value: any, pattern: string | object): boolean {\n // Wildcard matches anything\n if (pattern === '*') {\n return true;\n }\n\n // String pattern matching with glob\n if (typeof pattern === 'string') {\n if (typeof value !== 'string') {\n return false;\n }\n return minimatch(value, pattern);\n }\n\n // Object shape matching\n if (typeof pattern === 'object' && pattern !== null) {\n if (typeof value !== 'object' || value === null) {\n return false;\n }\n\n // Check if all pattern keys match\n for (const [key, patternValue] of Object.entries(pattern)) {\n // Handle wildcard in object patterns\n if (key === '*') {\n // Check if all values match the pattern value\n const allValuesMatch = Object.values(value).every(v => {\n if (patternValue === '*') return true;\n return v === 
patternValue;\n });\n if (!allValuesMatch) return false;\n } else {\n // Exact key and value match required\n if (!(key in value)) {\n return false;\n }\n if (patternValue !== '*' && value[key] !== patternValue) {\n return false;\n }\n }\n }\n\n return true;\n }\n\n // Direct equality for other types\n return value === pattern;\n}\n\n/**\n * Get all flat keys (dot notation) from an object\n * \n * Examples:\n * - getFlatKeys({ a: { b: 1 } }) \u2192 [\"a\", \"a.b\"]\n * - getFlatKeys({ x: 1, y: { z: 2 } }) \u2192 [\"x\", \"y\", \"y.z\"]\n */\nexport function getFlatKeys(obj: any, prefix = ''): string[] {\n const keys: string[] = [];\n\n if (typeof obj !== 'object' || obj === null) {\n return [];\n }\n\n for (const [key, value] of Object.entries(obj)) {\n const fullKey = prefix ? `${prefix}.${key}` : key;\n keys.push(fullKey);\n\n // Recursively get nested keys (but not arrays)\n if (typeof value === 'object' && value !== null && !Array.isArray(value)) {\n keys.push(...getFlatKeys(value, fullKey));\n }\n }\n\n return keys;\n}\n\n/**\n * Parse wildcard pattern into prefix and suffix\n * \n * Examples:\n * - parseWildcard(\"mcp.*\") \u2192 { prefix: \"mcp.\", suffix: \"\" }\n * - parseWildcard(\"*.value\") \u2192 { prefix: \"\", suffix: \".value\" }\n * - parseWildcard(\"config.*.name\") \u2192 { prefix: \"config.\", suffix: \".name\" }\n */\nexport function parseWildcard(pattern: string): { prefix: string; suffix: string } {\n const wildcardIndex = pattern.indexOf('*');\n \n if (wildcardIndex === -1) {\n return { prefix: pattern, suffix: '' };\n }\n\n const prefix = pattern.substring(0, wildcardIndex);\n const suffix = pattern.substring(wildcardIndex + 1);\n\n return { prefix, suffix };\n}\n\n/**\n * Get keys matching wildcard pattern\n * \n * Examples:\n * - getMatchingKeys({ mcp: { a: 1, b: 2 } }, \"mcp.\", \"\") \u2192 [\"mcp.a\", \"mcp.b\"]\n */\nexport function getMatchingKeys(obj: any, prefix: string, suffix: string): string[] {\n const flatKeys = 
getFlatKeys(obj);\n \n return flatKeys.filter(key => {\n if (prefix && !key.startsWith(prefix)) {\n return false;\n }\n if (suffix && !key.endsWith(suffix)) {\n return false;\n }\n // Ensure there's something between prefix and suffix\n const wildcardPart = extractWildcardPart(key, prefix, suffix);\n return wildcardPart.length > 0;\n });\n}\n\n/**\n * Extract the wildcard part from a matched key\n * \n * Examples:\n * - extractWildcardPart(\"mcp.server1\", \"mcp.\", \"\") \u2192 \"server1\"\n * - extractWildcardPart(\"config.db.name\", \"config.\", \".name\") \u2192 \"db\"\n */\nexport function extractWildcardPart(key: string, prefix: string, suffix: string): string {\n let result = key;\n \n if (prefix) {\n result = result.substring(prefix.length);\n }\n \n if (suffix) {\n result = result.substring(0, result.length - suffix.length);\n }\n \n return result;\n}\n\n/**\n * Resolve wildcard paths in a document\n * \n * Supports wildcards (*) at any level to match dynamic keys.\n * \n * Examples:\n * - resolveWildcardPaths({ mcp: { a: { x: 1 }, b: { x: 2 } } }, \"mcp.*.x\")\n * \u2192 [\"mcp.a.x\", \"mcp.b.x\"]\n * \n * - resolveWildcardPaths({ servers: { s1: { h: {} }, s2: {} } }, \"servers.*.h\")\n * \u2192 [\"servers.s1.h\"]\n * (Note: servers.s2.h is NOT included because it doesn't exist)\n * \n * @param document - Document to search\n * @param pattern - Field path with wildcards (* for any segment)\n * @returns Array of resolved paths that match the pattern\n */\nexport function resolveWildcardPaths(\n document: any,\n pattern: string\n): string[] {\n const segments = pattern.split('.');\n const results: string[] = [];\n\n function traverse(obj: any, depth: number, currentPath: string[]) {\n if (depth >= segments.length) {\n // Reached end of pattern - this is a match\n results.push(currentPath.join('.'));\n return;\n }\n\n const segment = segments[depth];\n\n if (segment === '*') {\n // Wildcard - match all keys at this level\n if (typeof obj === 'object' && obj 
!== null && !Array.isArray(obj)) {\n for (const key of Object.keys(obj)) {\n traverse(obj[key], depth + 1, [...currentPath, key]);\n }\n }\n } else {\n // Literal segment - must match exactly\n if (typeof obj === 'object' && obj !== null && segment in obj) {\n traverse(obj[segment], depth + 1, [...currentPath, segment]);\n }\n }\n }\n\n traverse(document, 0, []);\n return results;\n}\n\n/**\n * Deep clone an object\n */\nexport function deepClone<T>(obj: T): T {\n if (obj === null || typeof obj !== 'object') {\n return obj;\n }\n\n if (Array.isArray(obj)) {\n return obj.map(item => deepClone(item)) as any;\n }\n\n const cloned: any = {};\n for (const [key, value] of Object.entries(obj)) {\n cloned[key] = deepClone(value);\n }\n\n return cloned;\n}\n", "/**\n * Map Context Resolution\n * \n * Resolves context variables ($$filename, $$dirname, etc.) in map operations.\n */\n\nimport type { MapContext } from './types.js';\n\n/**\n * Resolve context variables in a value\n * \n * Context variables use $$ prefix:\n * - $$filename \u2192 context.filename\n * - $$dirname \u2192 context.dirname\n * - $$path \u2192 context.path\n * - $$ext \u2192 context.ext\n * \n * Literal values (no $$ prefix) are returned unchanged.\n * Escaped literal (\\$$) has backslash removed to get literal \"$$\".\n * \n * Examples:\n * - resolveValue(\"$$filename\", ctx) \u2192 \"my-agent\"\n * - resolveValue(\"static-value\", ctx) \u2192 \"static-value\"\n * - resolveValue(\"\\\\$$literal\", ctx) \u2192 \"$$literal\"\n * - resolveValue({ name: \"$$filename\" }, ctx) \u2192 { name: \"my-agent\" }\n */\nexport function resolveValue(value: any, context: MapContext): any {\n // Handle strings with context variables\n if (typeof value === 'string') {\n // Check for escaped literal (\\$$)\n if (value.startsWith('\\\\$$')) {\n return value.substring(1); // Remove escape backslash\n }\n\n // Check for context variable ($$)\n if (value.startsWith('$$')) {\n const varName = value.substring(2) as keyof 
MapContext;\n \n // Return context value if it exists\n if (varName in context) {\n return context[varName];\n }\n \n // Return original value if context variable doesn't exist\n return value;\n }\n\n // Literal string - return as-is\n return value;\n }\n\n // Handle objects - recursively resolve nested values\n if (typeof value === 'object' && value !== null) {\n if (Array.isArray(value)) {\n return value.map(item => resolveValue(item, context));\n }\n\n const result: any = {};\n for (const [key, val] of Object.entries(value)) {\n result[key] = resolveValue(val, context);\n }\n return result;\n }\n\n // Return primitive values unchanged\n return value;\n}\n\n/**\n * Check if a value contains context variables\n */\nexport function hasContextVariables(value: any): boolean {\n if (typeof value === 'string') {\n return value.startsWith('$$') && !value.startsWith('\\\\$$');\n }\n\n if (typeof value === 'object' && value !== null) {\n if (Array.isArray(value)) {\n return value.some(item => hasContextVariables(item));\n }\n\n return Object.values(value).some(val => hasContextVariables(val));\n }\n\n return false;\n}\n", "/**\n * $set Operation\n * \n * Sets field values with context variable resolution.\n * Supports dot notation for nested fields.\n */\n\nimport type { SetOperation, MapContext } from '../types.js';\nimport { setNestedValue } from '../utils.js';\nimport { resolveValue } from '../context.js';\n\n/**\n * Execute $set operation\n * \n * Examples:\n * - { \"$set\": { \"name\": \"$$filename\" } }\n * - { \"$set\": { \"name\": \"$$filename\", \"version\": \"1.0.0\" } }\n * - { \"$set\": { \"config.model\": \"sonnet\" } }\n */\nexport function executeSet(\n document: any,\n operation: SetOperation,\n context: MapContext\n): any {\n const result = { ...document };\n const fields = operation.$set;\n\n for (const [fieldPath, value] of Object.entries(fields)) {\n // Resolve context variables in the value\n const resolvedValue = resolveValue(value, context);\n \n // 
Set the value using dot notation\n setNestedValue(result, fieldPath, resolvedValue);\n }\n\n return result;\n}\n\n/**\n * Validate $set operation\n */\nexport function validateSet(operation: SetOperation): { valid: boolean; errors: string[] } {\n const errors: string[] = [];\n\n if (!operation.$set || typeof operation.$set !== 'object') {\n errors.push('$set must be an object');\n return { valid: false, errors };\n }\n\n if (Object.keys(operation.$set).length === 0) {\n errors.push('$set must have at least one field');\n }\n\n for (const [key, value] of Object.entries(operation.$set)) {\n if (!key || key.trim() === '') {\n errors.push('$set field path cannot be empty');\n }\n }\n\n return {\n valid: errors.length === 0,\n errors,\n };\n}\n", "/**\n * $rename Operation\n * \n * Renames fields with support for:\n * - Simple renames\n * - Nested paths with dot notation\n * - Wildcard patterns (mcp.* \u2192 mcpServers.*)\n */\n\nimport type { RenameOperation } from '../types.js';\nimport {\n getNestedValue,\n setNestedValue,\n deleteNestedValue,\n parseWildcard,\n getMatchingKeys,\n extractWildcardPart,\n} from '../utils.js';\n\n/**\n * Execute $rename operation\n * \n * Examples:\n * - { \"$rename\": { \"oldName\": \"newName\" } }\n * - { \"$rename\": { \"mcp.*\": \"mcpServers.*\" } }\n * - { \"$rename\": { \"config.old\": \"settings.new\" } }\n */\nexport function executeRename(\n document: any,\n operation: RenameOperation\n): any {\n const result = { ...document };\n const mappings = operation.$rename;\n\n for (const [oldPath, newPath] of Object.entries(mappings)) {\n if (oldPath.includes('*')) {\n // Handle wildcard rename\n renameWithWildcard(result, oldPath, newPath);\n } else {\n // Simple rename\n renameSimple(result, oldPath, newPath);\n }\n }\n\n return result;\n}\n\n/**\n * Simple rename without wildcards\n */\nfunction renameSimple(obj: any, oldPath: string, newPath: string): void {\n const value = getNestedValue(obj, oldPath);\n \n if (value !== 
undefined) {\n setNestedValue(obj, newPath, value);\n deleteNestedValue(obj, oldPath);\n }\n}\n\n/**\n * Rename with wildcard patterns\n * Both oldPath and newPath must have exactly one wildcard\n */\nfunction renameWithWildcard(obj: any, oldPattern: string, newPattern: string): void {\n // Parse wildcards\n const { prefix: oldPrefix, suffix: oldSuffix } = parseWildcard(oldPattern);\n const { prefix: newPrefix, suffix: newSuffix } = parseWildcard(newPattern);\n\n // Find all matching keys\n const matchingKeys = getMatchingKeys(obj, oldPrefix, oldSuffix);\n\n // Rename each matching key\n for (const oldKey of matchingKeys) {\n // Extract the wildcard part\n const wildcardPart = extractWildcardPart(oldKey, oldPrefix, oldSuffix);\n \n // Construct new key\n const newKey = newPrefix + wildcardPart + newSuffix;\n \n // Get value and rename\n const value = getNestedValue(obj, oldKey);\n if (value !== undefined) {\n setNestedValue(obj, newKey, value);\n deleteNestedValue(obj, oldKey);\n }\n }\n}\n\n/**\n * Validate $rename operation\n */\nexport function validateRename(operation: RenameOperation): { valid: boolean; errors: string[] } {\n const errors: string[] = [];\n\n if (!operation.$rename || typeof operation.$rename !== 'object') {\n errors.push('$rename must be an object');\n return { valid: false, errors };\n }\n\n if (Object.keys(operation.$rename).length === 0) {\n errors.push('$rename must have at least one field mapping');\n }\n\n for (const [oldPath, newPath] of Object.entries(operation.$rename)) {\n if (!oldPath || oldPath.trim() === '') {\n errors.push('$rename source path cannot be empty');\n }\n\n if (!newPath || newPath.trim() === '') {\n errors.push('$rename target path cannot be empty');\n }\n\n // Validate wildcard usage\n const oldWildcards = (oldPath.match(/\\*/g) || []).length;\n const newWildcards = (newPath.match(/\\*/g) || []).length;\n\n if (oldWildcards !== newWildcards) {\n errors.push(\n `$rename wildcard mismatch: \"${oldPath}\" has 
${oldWildcards} wildcard(s), ` +\n `but \"${newPath}\" has ${newWildcards} wildcard(s). Both must have the same count.`\n );\n }\n\n if (oldWildcards > 1) {\n errors.push(`$rename does not support multiple wildcards in one pattern: \"${oldPath}\"`);\n }\n }\n\n return {\n valid: errors.length === 0,\n errors,\n };\n}\n", "/**\n * $unset Operation\n * \n * Removes fields from document.\n * Supports dot notation for nested fields.\n */\n\nimport type { UnsetOperation } from '../types.js';\nimport { deleteNestedValue, resolveWildcardPaths } from '../utils.js';\n\n/**\n * Execute $unset operation\n * \n * Supports wildcard paths for batch deletion.\n * \n * Examples:\n * - { \"$unset\": \"permission\" }\n * - { \"$unset\": [\"permission\", \"legacy\", \"temp\"] }\n * - { \"$unset\": \"config.deprecated\" }\n * - { \"$unset\": \"mcp_servers.*.headers\" } // Wildcard\n */\nexport function executeUnset(\n document: any,\n operation: UnsetOperation\n): any {\n const result = { ...document };\n const fields = Array.isArray(operation.$unset) ? operation.$unset : [operation.$unset];\n\n for (const fieldPath of fields) {\n // Check for wildcard\n if (fieldPath.includes('*')) {\n // Resolve wildcard paths\n const matchedPaths = resolveWildcardPaths(result, fieldPath);\n \n // Delete all matched paths\n for (const path of matchedPaths) {\n deleteNestedValue(result, path);\n }\n } else {\n // Single field\n deleteNestedValue(result, fieldPath);\n }\n }\n\n return result;\n}\n\n/**\n * Validate $unset operation\n */\nexport function validateUnset(operation: UnsetOperation): { valid: boolean; errors: string[] } {\n const errors: string[] = [];\n\n if (!operation.$unset) {\n errors.push('$unset must be a string or array of strings');\n return { valid: false, errors };\n }\n\n const fields = Array.isArray(operation.$unset) ? 
operation.$unset : [operation.$unset];\n\n if (fields.length === 0) {\n errors.push('$unset must have at least one field');\n }\n\n for (const field of fields) {\n if (typeof field !== 'string') {\n errors.push('$unset field must be a string');\n } else if (!field || field.trim() === '') {\n errors.push('$unset field path cannot be empty');\n }\n }\n\n return {\n valid: errors.length === 0,\n errors,\n };\n}\n", "/**\n * $switch Operation\n * \n * Pattern matching with value replacement.\n * First match wins (like switch statements).\n */\n\nimport type { SwitchOperation } from '../types.js';\nimport { getNestedValue, setNestedValue, matchPattern } from '../utils.js';\n\n/**\n * Execute $switch operation\n * \n * Examples:\n * {\n * \"$switch\": {\n * \"field\": \"model\",\n * \"cases\": [\n * { \"pattern\": \"anthropic/claude-sonnet-*\", \"value\": \"sonnet\" },\n * { \"pattern\": \"anthropic/claude-opus-*\", \"value\": \"opus\" }\n * ],\n * \"default\": \"inherit\"\n * }\n * }\n */\nexport function executeSwitch(\n document: any,\n operation: SwitchOperation\n): any {\n const result = { ...document };\n const { field, cases, default: defaultValue } = operation.$switch;\n\n // Get current value\n const currentValue = getNestedValue(result, field);\n\n // If field doesn't exist, don't apply any transformation\n // This prevents creating fields that weren't in the original document\n if (currentValue === undefined) {\n return result;\n }\n\n // Try each case in order (first match wins)\n for (const { pattern, value } of cases) {\n if (matchPattern(currentValue, pattern)) {\n setNestedValue(result, field, value);\n return result;\n }\n }\n\n // No match - use default if provided (only when field exists)\n if (defaultValue !== undefined) {\n setNestedValue(result, field, defaultValue);\n }\n\n return result;\n}\n\n/**\n * Validate $switch operation\n */\nexport function validateSwitch(operation: SwitchOperation): { valid: boolean; errors: string[] } {\n const errors: 
string[] = [];\n\n if (!operation.$switch || typeof operation.$switch !== 'object') {\n errors.push('$switch must be an object');\n return { valid: false, errors };\n }\n\n const config = operation.$switch;\n\n if (!config.field || typeof config.field !== 'string') {\n errors.push('$switch.field must be a non-empty string');\n }\n\n if (!config.cases || !Array.isArray(config.cases)) {\n errors.push('$switch.cases must be an array');\n return { valid: errors.length === 0, errors };\n }\n\n if (config.cases.length === 0) {\n errors.push('$switch.cases must have at least one case');\n }\n\n for (let i = 0; i < config.cases.length; i++) {\n const switchCase = config.cases[i];\n \n if (!switchCase || typeof switchCase !== 'object') {\n errors.push(`$switch.cases[${i}] must be an object`);\n continue;\n }\n\n if (!('pattern' in switchCase)) {\n errors.push(`$switch.cases[${i}] must have a \"pattern\" field`);\n }\n\n if (!('value' in switchCase)) {\n errors.push(`$switch.cases[${i}] must have a \"value\" field`);\n }\n }\n\n return {\n valid: errors.length === 0,\n errors,\n };\n}\n", "/**\n * $pipeline Operation (MongoDB-aligned)\n * \n * Pipeline transformation on a field.\n * Renamed from $transform for better MongoDB alignment.\n * All sub-operations now use $ prefix.\n */\n\nimport type { PipelineOperation, PipelineStep, MapContext } from '../types.js';\nimport { getNestedValue, setNestedValue, deleteNestedValue, resolveWildcardPaths } from '../utils.js';\nimport { resolveValue } from '../context.js';\n\n/**\n * Execute $pipeline operation\n * \n * Supports wildcard paths for batch transformations.\n */\nexport function executePipeline(\n document: any,\n operation: PipelineOperation,\n context: MapContext\n): any {\n const result = { ...document };\n const { field, operations } = operation.$pipeline;\n\n // Check if field contains wildcard\n const hasWildcard = field.includes('*');\n\n if (hasWildcard) {\n // Wildcard mode: resolve all matching paths\n const 
matchedPaths = resolveWildcardPaths(result, field);\n\n if (matchedPaths.length === 0) {\n return result;\n }\n\n // Apply pipeline to each matched path independently\n for (const path of matchedPaths) {\n let value = getNestedValue(result, path);\n\n for (const step of operations) {\n value = applyPipelineStep(value, step, context);\n }\n\n if (value === '' || (Array.isArray(value) && value.length === 0)) {\n deleteNestedValue(result, path);\n } else {\n setNestedValue(result, path, value);\n }\n }\n } else {\n // Single field mode\n let value = getNestedValue(result, field);\n\n // If field doesn't exist (undefined), don't apply transformations\n // This prevents creating fields that weren't in the original document\n if (value === undefined) {\n return result;\n }\n\n for (const step of operations) {\n value = applyPipelineStep(value, step, context);\n }\n\n if (value === '' || (Array.isArray(value) && value.length === 0)) {\n deleteNestedValue(result, field);\n } else {\n setNestedValue(result, field, value);\n }\n }\n\n return result;\n}\n\n/**\n * Apply a single pipeline step\n */\nfunction applyPipelineStep(value: any, step: PipelineStep, context: MapContext): any {\n if ('$filter' in step) {\n return applyFilter(value, step.$filter);\n }\n\n if ('$objectToArray' in step) {\n return applyObjectToArray(value, step.$objectToArray);\n }\n\n if ('$arrayToObject' in step) {\n return applyArrayToObject(value, step.$arrayToObject, context);\n }\n\n if ('$map' in step) {\n return applyMap(value, step.$map);\n }\n\n if ('$reduce' in step) {\n return applyReduce(value, step.$reduce);\n }\n\n if ('$replace' in step) {\n return applyReplace(value, step.$replace);\n }\n\n if ('$partition' in step) {\n return applyPartition(value, step.$partition);\n }\n\n if ('$extract' in step) {\n return applyExtract(value, step.$extract);\n }\n\n if ('$mapValues' in step) {\n return applyMapValues(value, step.$mapValues, context);\n }\n\n if ('$mergeFields' in step) {\n return 
applyMergeFields(value, step.$mergeFields);\n }\n\n // Unknown step - return unchanged\n return value;\n}\n\n/**\n * $filter step: { \"$filter\": { \"match\": { \"value\": true } } }\n * Keeps entries where value or key matches\n * Matches MongoDB $filter semantics\n */\nfunction applyFilter(value: any, config: { match?: { value?: any; key?: any } }): any {\n if (typeof value !== 'object' || value === null) {\n return value;\n }\n\n const result: any = {};\n const match = config.match || {};\n\n for (const [key, val] of Object.entries(value)) {\n let keep = true;\n\n // Filter by value\n if ('value' in match && val !== match.value) {\n keep = false;\n }\n\n // Filter by key\n if ('key' in match && key !== match.key) {\n keep = false;\n }\n\n if (keep) {\n result[key] = val;\n }\n }\n\n return result;\n}\n\n/**\n * $objectToArray step (matches MongoDB $objectToArray)\n * \n * Examples:\n * - { \"$objectToArray\": { \"extract\": \"keys\" } } \u2192 [\"a\", \"b\"]\n * - { \"$objectToArray\": { \"extract\": \"values\" } } \u2192 [1, 2]\n * - { \"$objectToArray\": { \"extract\": \"entries\" } } \u2192 [[\"a\", 1], [\"b\", 2]]\n * - { \"$objectToArray\": true } \u2192 [[\"a\", 1], [\"b\", 2]] (default)\n */\nfunction applyObjectToArray(\n value: any, \n config: true | { extract?: 'keys' | 'values' | 'entries' }\n): any {\n if (typeof value !== 'object' || value === null) {\n return [];\n }\n\n // Handle boolean shorthand\n if (config === true) {\n return Object.entries(value);\n }\n\n const extract = config.extract || 'entries';\n\n switch (extract) {\n case 'keys':\n return Object.keys(value);\n case 'values':\n return Object.values(value);\n case 'entries':\n return Object.entries(value);\n default:\n return Object.entries(value);\n }\n}\n\n/**\n * $arrayToObject step (matches MongoDB $arrayToObject)\n * Convert array of strings to object with specified value\n * \n * Examples:\n * - { \"$arrayToObject\": { \"value\": true } }\n * [\"bash\", \"read\"] \u2192 { bash: 
true, read: true }\n * \n * - { \"$arrayToObject\": { \"value\": \"$$filename\" } }\n * [\"tool1\", \"tool2\"] \u2192 { tool1: \"code-reviewer\", tool2: \"code-reviewer\" }\n */\nfunction applyArrayToObject(value: any, config: { value: any }, context: MapContext): any {\n if (!Array.isArray(value)) {\n return value;\n }\n\n // Resolve context variables in the value\n const resolvedValue = resolveValue(config.value, context);\n\n const result: any = {};\n for (const key of value) {\n if (typeof key === 'string') {\n result[key] = resolvedValue;\n }\n }\n\n return result;\n}\n\n/**\n * $map step (inspired by MongoDB $map)\n * Transform each element in array\n * \n * Examples:\n * - { \"$map\": { \"each\": \"capitalize\" } } // String transformation\n * - { \"$map\": { \"each\": \"uppercase\" } }\n * - { \"$map\": { \"each\": \"lowercase\" } }\n * - { \"$map\": { \"replace\": { \"old\": \"new\" } } } // Value replacement using lookup table\n */\nfunction applyMap(\n value: any, \n config: { each?: 'capitalize' | 'uppercase' | 'lowercase'; replace?: Record<string, string> }\n): any {\n if (!Array.isArray(value)) {\n return value;\n }\n\n return value.map(item => {\n if (typeof item !== 'string') {\n return item;\n }\n\n // Replace mode: lookup-based value replacement\n if (config.replace) {\n return config.replace[item] || item; // Return mapped value or original if not found\n }\n\n // Each mode: string transformation\n if (config.each) {\n switch (config.each) {\n case 'capitalize':\n return item.charAt(0).toUpperCase() + item.slice(1);\n case 'uppercase':\n return item.toUpperCase();\n case 'lowercase':\n return item.toLowerCase();\n default:\n return item;\n }\n }\n\n // No transformation specified\n return item;\n });\n}\n\n/**\n * $reduce step (inspired by MongoDB $reduce)\n * Reduces array using common patterns\n * \n * Examples:\n * - { \"$reduce\": { \"type\": \"join\", \"separator\": \", \" } } \u2192 \"a, b, c\"\n * - { \"$reduce\": { \"type\": \"split\", 
\"separator\": \", \" } } \u2192 [\"a\", \"b\", \"c\"]\n * - { \"$reduce\": { \"type\": \"sum\" } } \u2192 6\n * - { \"$reduce\": { \"type\": \"count\" } } \u2192 3\n */\nfunction applyReduce(\n value: any, \n config: { type: 'join' | 'split' | 'sum' | 'count'; separator?: string }\n): any {\n const { type, separator = '' } = config;\n\n switch (type) {\n case 'join':\n // Join array to string\n if (!Array.isArray(value)) {\n return value;\n }\n return value.join(separator);\n\n case 'split':\n // Split string to array (inverse of join)\n if (typeof value !== 'string') {\n return value;\n }\n return value.split(separator).map(s => s.trim()).filter(s => s.length > 0);\n\n case 'sum':\n // Sum array of numbers\n if (!Array.isArray(value)) {\n return value;\n }\n return value.reduce((sum, n) => sum + (Number(n) || 0), 0);\n\n case 'count':\n // Count array elements\n if (!Array.isArray(value)) {\n return 0;\n }\n return value.length;\n\n default:\n return value;\n }\n}\n\n/**\n * $replace step (similar to MongoDB $replaceOne/$replaceAll)\n * String replacement using regex with capture group support\n * \n * Examples:\n * - { \"$replace\": { \"pattern\": \"^anthropic/\", \"with\": \"\" } }\n * \"anthropic/claude-sonnet\" \u2192 \"claude-sonnet\"\n * \n * - { \"$replace\": { \"pattern\": \"(-[0-9]+)\\\\.([0-9]+)\", \"with\": \"$1-$2\", \"flags\": \"g\" } }\n * \"claude-4.5\" \u2192 \"claude-4-5\"\n * \n * - { \"$replace\": { \"pattern\": \"^(.*)$\", \"with\": \"anthropic/$1\" } }\n * \"claude-sonnet\" \u2192 \"anthropic/claude-sonnet\"\n */\nfunction applyReplace(\n value: any, \n config: { pattern: string; with: string; flags?: string }\n): any {\n if (typeof value !== 'string') {\n return value;\n }\n\n const flags = config.flags || '';\n const regex = new RegExp(config.pattern, flags);\n \n return value.replace(regex, config.with);\n}\n\n// ============================================================================\n// New Atomic Operations\n// 
============================================================================\n\n/**\n * $partition step - Split object entries into buckets by pattern\n */\nfunction applyPartition(\n value: any,\n config: { by: 'value' | 'key'; patterns: Record<string, string> }\n): any {\n if (typeof value !== 'object' || value === null) {\n return value;\n }\n\n const result: Record<string, any> = {};\n\n for (const [entryKey, entryValue] of Object.entries(value)) {\n const testValue = config.by === 'value' ? String(entryValue) : entryKey;\n \n for (const [bucketName, pattern] of Object.entries(config.patterns)) {\n const regex = new RegExp(pattern);\n if (regex.test(testValue)) {\n // Only create bucket if it doesn't exist yet\n if (!result[bucketName]) {\n result[bucketName] = {};\n }\n result[bucketName][entryKey] = entryValue;\n break;\n }\n }\n }\n\n return result;\n}\n\n/**\n * $extract step - Extract substring using regex capture groups\n */\nfunction applyExtract(\n value: any,\n config: { pattern: string; group: number; default?: string }\n): any {\n if (typeof value !== 'string') {\n return value;\n }\n\n const regex = new RegExp(config.pattern);\n const match = value.match(regex);\n \n if (!match) {\n if (config.default === '$SELF') {\n return value;\n }\n return config.default !== undefined ? 
config.default : value;\n }\n\n return match[config.group];\n}\n\n/**\n * $mapValues step - Apply transformation to each value in object\n */\nfunction applyMapValues(\n value: any,\n config: { operations: PipelineStep[] },\n context: MapContext\n): any {\n if (typeof value !== 'object' || value === null) {\n return value;\n }\n\n const result: any = {};\n \n for (const [key, val] of Object.entries(value)) {\n let transformedValue = val;\n \n for (const step of config.operations) {\n transformedValue = applyPipelineStep(transformedValue, step, context);\n }\n \n result[key] = transformedValue;\n }\n\n return result;\n}\n\n/**\n * $mergeFields step - Merge multiple fields into one\n */\nfunction applyMergeFields(\n value: any,\n config: { from: string[]; to: string; remove?: boolean }\n): any {\n if (typeof value !== 'object' || value === null) {\n return value;\n }\n\n const result = { ...value };\n const merged: any = {};\n\n for (const sourceKey of config.from) {\n if (sourceKey in result) {\n const sourceValue = result[sourceKey];\n if (typeof sourceValue === 'object' && sourceValue !== null) {\n Object.assign(merged, sourceValue);\n }\n \n if (config.remove !== false) {\n delete result[sourceKey];\n }\n }\n }\n\n if (Object.keys(merged).length > 0) {\n result[config.to] = merged;\n }\n\n return result;\n}\n\n/**\n * Validate $pipeline operation\n */\nexport function validatePipeline(operation: PipelineOperation): { valid: boolean; errors: string[] } {\n const errors: string[] = [];\n\n if (!operation.$pipeline || typeof operation.$pipeline !== 'object') {\n errors.push('$pipeline must be an object');\n return { valid: false, errors };\n }\n\n const config = operation.$pipeline;\n\n if (!config.field || typeof config.field !== 'string') {\n errors.push('$pipeline.field must be a non-empty string');\n }\n\n if (!config.operations || !Array.isArray(config.operations)) {\n errors.push('$pipeline.operations must be an array');\n return { valid: errors.length === 0, 
errors };\n }\n\n if (config.operations.length === 0) {\n errors.push('$pipeline.operations must have at least one operation');\n }\n\n for (let i = 0; i < config.operations.length; i++) {\n const step = config.operations[i];\n \n if (!step || typeof step !== 'object') {\n errors.push(`$pipeline.operations[${i}] must be an object`);\n continue;\n }\n\n const stepKeys = Object.keys(step);\n if (stepKeys.length !== 1) {\n errors.push(`$pipeline.operations[${i}] must have exactly one operation`);\n continue;\n }\n\n const operation = stepKeys[0];\n const validOps = [\n '$filter', \n '$objectToArray', \n '$arrayToObject', \n '$map', \n '$reduce', \n '$replace',\n '$partition',\n '$extract',\n '$mapValues',\n '$mergeFields'\n ];\n \n if (!validOps.includes(operation)) {\n errors.push(\n `$pipeline.operations[${i}] has unknown operation \"${operation}\". ` +\n `Valid: ${validOps.join(', ')}`\n );\n }\n\n // Validate specific operations\n if (operation === '$filter') {\n const filterConfig = (step as any).$filter;\n if (filterConfig && typeof filterConfig === 'object' && filterConfig.match) {\n const match = filterConfig.match;\n if (typeof match !== 'object') {\n errors.push(`$pipeline.operations[${i}].$filter.match must be an object`);\n }\n }\n }\n\n if (operation === '$objectToArray') {\n const config = (step as any).$objectToArray;\n if (config !== true && typeof config !== 'object') {\n errors.push(`$pipeline.operations[${i}].$objectToArray must be true or an object`);\n } else if (typeof config === 'object' && config.extract) {\n const validExtracts = ['keys', 'values', 'entries'];\n if (!validExtracts.includes(config.extract)) {\n errors.push(\n `$pipeline.operations[${i}].$objectToArray.extract must be one of: ${validExtracts.join(', ')}`\n );\n }\n }\n }\n\n if (operation === '$arrayToObject') {\n const config = (step as any).$arrayToObject;\n if (!config || typeof config !== 'object') {\n errors.push(`$pipeline.operations[${i}].$arrayToObject must be an 
object`);\n } else if (!('value' in config)) {\n errors.push(`$pipeline.operations[${i}].$arrayToObject must have a 'value' property`);\n }\n }\n\n if (operation === '$map') {\n const config = (step as any).$map;\n if (!config || typeof config !== 'object') {\n errors.push(`$pipeline.operations[${i}].$map must be an object`);\n } else {\n const hasEach = 'each' in config;\n const hasReplace = 'replace' in config;\n \n // Must have either 'each' or 'replace', but not both\n if (!hasEach && !hasReplace) {\n errors.push(`$pipeline.operations[${i}].$map must have either 'each' or 'replace' property`);\n } else if (hasEach && hasReplace) {\n errors.push(`$pipeline.operations[${i}].$map cannot have both 'each' and 'replace' properties`);\n } else if (hasEach) {\n // Validate 'each' mode (string transformations)\n const validMaps = ['capitalize', 'uppercase', 'lowercase'];\n if (!validMaps.includes(config.each)) {\n errors.push(\n `$pipeline.operations[${i}].$map.each must be one of: ${validMaps.join(', ')}`\n );\n }\n } else if (hasReplace) {\n // Validate 'replace' mode (lookup table)\n if (typeof config.replace !== 'object' || config.replace === null || Array.isArray(config.replace)) {\n errors.push(`$pipeline.operations[${i}].$map.replace must be an object (lookup table)`);\n } else if (Object.keys(config.replace).length === 0) {\n errors.push(`$pipeline.operations[${i}].$map.replace must have at least one mapping`);\n }\n }\n }\n }\n\n if (operation === '$reduce') {\n const config = (step as any).$reduce;\n if (!config || typeof config !== 'object') {\n errors.push(`$pipeline.operations[${i}].$reduce must be an object`);\n } else if (!config.type) {\n errors.push(`$pipeline.operations[${i}].$reduce must have a 'type' property`);\n } else {\n const validTypes = ['join', 'split', 'sum', 'count'];\n if (!validTypes.includes(config.type)) {\n errors.push(\n `$pipeline.operations[${i}].$reduce.type must be one of: ${validTypes.join(', ')}`\n );\n }\n if ((config.type === 
'join' || config.type === 'split') && config.separator !== undefined && typeof config.separator !== 'string') {\n errors.push(`$pipeline.operations[${i}].$reduce.separator must be a string`);\n }\n }\n }\n\n if (operation === '$replace') {\n const config = (step as any).$replace;\n if (!config || typeof config !== 'object') {\n errors.push(`$pipeline.operations[${i}].$replace must be an object`);\n } else {\n if (typeof config.pattern !== 'string') {\n errors.push(`$pipeline.operations[${i}].$replace.pattern must be a string`);\n }\n if (typeof config.with !== 'string') {\n errors.push(`$pipeline.operations[${i}].$replace.with must be a string`);\n }\n if (config.flags !== undefined && typeof config.flags !== 'string') {\n errors.push(`$pipeline.operations[${i}].$replace.flags must be a string`);\n }\n }\n }\n\n if (operation === '$partition') {\n const config = (step as any).$partition;\n if (!config || typeof config !== 'object') {\n errors.push(`$pipeline.operations[${i}].$partition must be an object`);\n } else {\n if (!config.by || !['value', 'key'].includes(config.by)) {\n errors.push(`$pipeline.operations[${i}].$partition.by must be \"value\" or \"key\"`);\n }\n if (!config.patterns || typeof config.patterns !== 'object') {\n errors.push(`$pipeline.operations[${i}].$partition.patterns must be an object`);\n } else if (Object.keys(config.patterns).length === 0) {\n errors.push(`$pipeline.operations[${i}].$partition.patterns must have at least one pattern`);\n }\n }\n }\n\n if (operation === '$extract') {\n const config = (step as any).$extract;\n if (!config || typeof config !== 'object') {\n errors.push(`$pipeline.operations[${i}].$extract must be an object`);\n } else {\n if (typeof config.pattern !== 'string') {\n errors.push(`$pipeline.operations[${i}].$extract.pattern must be a string`);\n }\n if (typeof config.group !== 'number') {\n errors.push(`$pipeline.operations[${i}].$extract.group must be a number`);\n }\n }\n }\n\n if (operation === 
'$mapValues') {\n const config = (step as any).$mapValues;\n if (!config || typeof config !== 'object') {\n errors.push(`$pipeline.operations[${i}].$mapValues must be an object`);\n } else if (!Array.isArray(config.operations)) {\n errors.push(`$pipeline.operations[${i}].$mapValues.operations must be an array`);\n } else if (config.operations.length === 0) {\n errors.push(`$pipeline.operations[${i}].$mapValues.operations must have at least one operation`);\n }\n }\n\n if (operation === '$mergeFields') {\n const config = (step as any).$mergeFields;\n if (!config || typeof config !== 'object') {\n errors.push(`$pipeline.operations[${i}].$mergeFields must be an object`);\n } else {\n if (!Array.isArray(config.from) || config.from.length === 0) {\n errors.push(`$pipeline.operations[${i}].$mergeFields.from must be a non-empty array`);\n }\n if (typeof config.to !== 'string') {\n errors.push(`$pipeline.operations[${i}].$mergeFields.to must be a string`);\n }\n }\n }\n }\n\n return {\n valid: errors.length === 0,\n errors,\n };\n}\n", "/**\n * $copy Operation\n * \n * Copy field with optional pattern-based transformation.\n */\n\nimport type { CopyOperation } from '../types.js';\nimport { getNestedValue, setNestedValue, matchPattern } from '../utils.js';\n\n/**\n * Execute $copy operation\n * \n * Example:\n * {\n * \"$copy\": {\n * \"from\": \"permission\",\n * \"to\": \"permissionMode\",\n * \"transform\": {\n * \"cases\": [\n * { \"pattern\": { \"edit\": \"deny\", \"bash\": \"deny\" }, \"value\": \"plan\" }\n * ],\n * \"default\": \"default\"\n * }\n * }\n * }\n */\nexport function executeCopy(\n document: any,\n operation: CopyOperation\n): any {\n const result = { ...document };\n const { from, to, transform } = operation.$copy;\n\n // Get source value\n const sourceValue = getNestedValue(result, from);\n\n // If source doesn't exist, don't set target\n if (sourceValue === undefined) {\n return result;\n }\n\n let targetValue = sourceValue;\n\n // Apply 
transformation if provided\n if (transform) {\n targetValue = applyTransform(sourceValue, transform);\n }\n\n // Set target value\n setNestedValue(result, to, targetValue);\n\n return result;\n}\n\n/**\n * Apply pattern-based transformation\n */\nfunction applyTransform(\n value: any,\n transform: { cases: Array<{ pattern: string | object; value: any }>; default?: any }\n): any {\n // Try each case in order (first match wins)\n for (const { pattern, value: caseValue } of transform.cases) {\n if (matchPattern(value, pattern)) {\n return caseValue;\n }\n }\n\n // No match - use default if provided, otherwise return original\n return transform.default !== undefined ? transform.default : value;\n}\n\n/**\n * Validate $copy operation\n */\nexport function validateCopy(operation: CopyOperation): { valid: boolean; errors: string[] } {\n const errors: string[] = [];\n\n if (!operation.$copy || typeof operation.$copy !== 'object') {\n errors.push('$copy must be an object');\n return { valid: false, errors };\n }\n\n const config = operation.$copy;\n\n if (!config.from || typeof config.from !== 'string') {\n errors.push('$copy.from must be a non-empty string');\n }\n\n if (!config.to || typeof config.to !== 'string') {\n errors.push('$copy.to must be a non-empty string');\n }\n\n // Validate transform if provided\n if (config.transform) {\n if (typeof config.transform !== 'object') {\n errors.push('$copy.transform must be an object');\n return { valid: errors.length === 0, errors };\n }\n\n if (!config.transform.cases || !Array.isArray(config.transform.cases)) {\n errors.push('$copy.transform.cases must be an array');\n } else {\n if (config.transform.cases.length === 0) {\n errors.push('$copy.transform.cases must have at least one case');\n }\n\n for (let i = 0; i < config.transform.cases.length; i++) {\n const caseItem = config.transform.cases[i];\n \n if (!caseItem || typeof caseItem !== 'object') {\n errors.push(`$copy.transform.cases[${i}] must be an object`);\n 
continue;\n }\n\n if (!('pattern' in caseItem)) {\n errors.push(`$copy.transform.cases[${i}] must have a \"pattern\" field`);\n }\n\n if (!('value' in caseItem)) {\n errors.push(`$copy.transform.cases[${i}] must have a \"value\" field`);\n }\n }\n }\n }\n\n return {\n valid: errors.length === 0,\n errors,\n };\n}\n", "/**\n * $pipe Operation\n * \n * Applies external pipe transforms within the map pipeline.\n * Enables interleaving format conversions with schema transformations.\n * \n * This operation bridges the map pipeline (document transformations)\n * with the transform registry (format conversions, validations).\n */\n\nimport type { PipeOperation } from '../types.js';\nimport type { TransformRegistry } from '../../flow-transforms.js';\n\n/**\n * Execute $pipe operation\n * \n * Applies a sequence of transforms from the transform registry.\n * \n * Example:\n * {\n * \"$pipe\": [\"filter-comments\", \"json-to-toml\"]\n * }\n * \n * @param document - Input document\n * @param operation - Pipe operation configuration\n * @param transformRegistry - Transform registry to execute transforms from\n * @returns Transformed document\n */\nexport function executePipe(\n document: any,\n operation: PipeOperation,\n transformRegistry: TransformRegistry\n): any {\n let result = document;\n\n // Apply each transform sequentially\n for (const transformName of operation.$pipe) {\n try {\n result = transformRegistry.execute(transformName, result);\n } catch (error) {\n throw new Error(\n `$pipe transform '${transformName}' failed: ${\n error instanceof Error ? 
error.message : String(error)\n }`\n );\n }\n }\n\n return result;\n}\n\n/**\n * Validate $pipe operation\n * \n * Ensures the pipe configuration is valid.\n */\nexport function validatePipe(operation: PipeOperation): { valid: boolean; errors: string[] } {\n const errors: string[] = [];\n\n if (!operation.$pipe) {\n errors.push('$pipe must be defined');\n return { valid: false, errors };\n }\n\n if (!Array.isArray(operation.$pipe)) {\n errors.push('$pipe must be an array of transform names');\n return { valid: false, errors };\n }\n\n if (operation.$pipe.length === 0) {\n errors.push('$pipe must have at least one transform');\n }\n\n for (let i = 0; i < operation.$pipe.length; i++) {\n const transformName = operation.$pipe[i];\n \n if (typeof transformName !== 'string') {\n errors.push(`$pipe[${i}] must be a string (transform name)`);\n } else if (!transformName || transformName.trim() === '') {\n errors.push(`$pipe[${i}] transform name cannot be empty`);\n }\n }\n\n return {\n valid: errors.length === 0,\n errors,\n };\n}\n", "/**\n * Map Pipeline\n * \n * MongoDB-inspired document transformation pipeline.\n * Executes operations sequentially on documents.\n */\n\nimport type { MapPipeline, MapContext, Operation, ValidationResult } from './types.js';\nimport type { TransformRegistry } from '../flow-transforms.js';\nimport { deepClone } from './utils.js';\nimport { executeSet, validateSet } from './operations/set.js';\nimport { executeRename, validateRename } from './operations/rename.js';\nimport { executeUnset, validateUnset } from './operations/unset.js';\nimport { executeSwitch, validateSwitch } from './operations/switch.js';\nimport { executePipeline, validatePipeline } from './operations/transform.js';\nimport { executeCopy, validateCopy } from './operations/copy.js';\nimport { executePipe, validatePipe } from './operations/pipe.js';\n\n/**\n * Apply map pipeline to a document\n * \n * Executes operations sequentially, passing the result of each operation\n * 
to the next operation in the pipeline.\n * \n * @param document - Input document to transform\n * @param pipeline - Array of operations to apply\n * @param context - Context for variable resolution\n * @param transformRegistry - Optional transform registry for $pipe operations\n * @returns Transformed document\n */\nexport function applyMapPipeline(\n document: any,\n pipeline: MapPipeline,\n context: MapContext,\n transformRegistry?: TransformRegistry\n): any {\n // Start with a deep clone to avoid mutating input\n let result = deepClone(document);\n\n // Execute each operation in sequence\n for (const operation of pipeline) {\n result = executeOperation(result, operation, context, transformRegistry);\n }\n\n return result;\n}\n\n/**\n * Execute a single operation\n */\nfunction executeOperation(\n document: any,\n operation: Operation,\n context: MapContext,\n transformRegistry?: TransformRegistry\n): any {\n if ('$set' in operation) {\n return executeSet(document, operation, context);\n }\n\n if ('$rename' in operation) {\n return executeRename(document, operation);\n }\n\n if ('$unset' in operation) {\n return executeUnset(document, operation);\n }\n\n if ('$switch' in operation) {\n return executeSwitch(document, operation);\n }\n\n if ('$pipeline' in operation) {\n return executePipeline(document, operation, context);\n }\n\n if ('$copy' in operation) {\n return executeCopy(document, operation);\n }\n\n if ('$pipe' in operation) {\n if (!transformRegistry) {\n throw new Error('$pipe operation requires transform registry to be provided');\n }\n return executePipe(document, operation, transformRegistry);\n }\n\n // Unknown operation - return document unchanged\n return document;\n}\n\n/**\n * Validate a map pipeline\n * \n * Checks all operations for validity before execution.\n */\nexport function validateMapPipeline(pipeline: MapPipeline): ValidationResult {\n const errors: string[] = [];\n\n if (!Array.isArray(pipeline)) {\n return {\n valid: false,\n 
errors: ['Map pipeline must be an array'],\n };\n }\n\n if (pipeline.length === 0) {\n return {\n valid: false,\n errors: ['Map pipeline must have at least one operation'],\n };\n }\n\n for (let i = 0; i < pipeline.length; i++) {\n const operation = pipeline[i];\n \n if (!operation || typeof operation !== 'object') {\n errors.push(`Operation at index ${i} must be an object`);\n continue;\n }\n\n // Check that operation has exactly one operation key\n const operationKeys = Object.keys(operation);\n const validOperations = ['$set', '$rename', '$unset', '$switch', '$pipeline', '$copy', '$pipe'];\n const operationKey = operationKeys.find(key => validOperations.includes(key));\n\n if (!operationKey) {\n errors.push(\n `Operation at index ${i} must have one of: ${validOperations.join(', ')}`\n );\n continue;\n }\n\n if (operationKeys.length > 1) {\n errors.push(\n `Operation at index ${i} must have exactly one operation (found: ${operationKeys.join(', ')})`\n );\n continue;\n }\n\n // Validate specific operation\n const validation = validateOperation(operation);\n if (!validation.valid) {\n errors.push(\n `Operation at index ${i} (${operationKey}): ${validation.errors.join(', ')}`\n );\n }\n }\n\n return {\n valid: errors.length === 0,\n errors,\n };\n}\n\n/**\n * Validate a single operation\n */\nfunction validateOperation(operation: Operation): ValidationResult {\n if ('$set' in operation) {\n return validateSet(operation);\n }\n\n if ('$rename' in operation) {\n return validateRename(operation);\n }\n\n if ('$unset' in operation) {\n return validateUnset(operation);\n }\n\n if ('$switch' in operation) {\n return validateSwitch(operation);\n }\n\n if ('$pipeline' in operation) {\n return validatePipeline(operation);\n }\n\n if ('$copy' in operation) {\n return validateCopy(operation);\n }\n\n if ('$pipe' in operation) {\n return validatePipe(operation);\n }\n\n return {\n valid: false,\n errors: ['Unknown operation type'],\n };\n}\n\n/**\n * Split a map pipeline into 
schema operations and pipe operations.\n *\n * Convention:\n * - schema ops (everything except $pipe) operate on structured data\n * - $pipe ops can convert formats (e.g., json-to-toml) and are often applied later\n */\nexport function splitMapPipeline(pipeline: MapPipeline): { schemaOps: MapPipeline; pipeOps: MapPipeline } {\n const schemaOps: MapPipeline = [];\n const pipeOps: MapPipeline = [];\n\n for (const op of pipeline) {\n if (op && typeof op === 'object' && '$pipe' in op) {\n pipeOps.push(op);\n } else {\n schemaOps.push(op);\n }\n }\n\n return { schemaOps, pipeOps };\n}\n\n/**\n * Create a map context from file information\n */\nexport function createMapContext(options: {\n filename: string;\n dirname: string;\n path: string;\n ext: string;\n}): MapContext {\n return {\n filename: options.filename,\n dirname: options.dirname,\n path: options.path,\n ext: options.ext,\n };\n}\n", "/**\n * Flow Transforms\n *\n * Transform implementations for the flow execution pipeline.\n * Organized by category: Format Converters, Merge Strategies, Content Filters,\n * Markdown Processors, Value Transforms, Validation.\n *\n * ## Naming convention\n *\n * - **Bidirectional primitive** (`yaml`, `toml`, `markdown`): one registration\n * taking a `{ direction: 'parse' | 'stringify' }` option (default `'parse'`).\n * - **Explicit-pair aliases** (`yaml-to-json`, `json-to-yaml`): thin wrappers\n * that pre-set the direction.\n *\n * `$pipe` arrays accept bare names only (`pipe.ts` doesn't pass options), so\n * flow authors should always use the explicit-pair name in `platforms.jsonc`.\n */\n\nimport yaml from 'js-yaml';\nimport * as TOML from 'smol-toml';\nimport { logger } from '../../utils/logger.js';\nimport { parseMarkdownDocument, serializeMarkdownDocument } from './markdown.js';\n\n/**\n * Transform function interface\n */\nexport interface Transform {\n name: string;\n execute(input: any, options?: any): any;\n validate?(options?: any): boolean;\n}\n\n/**\n * Transform 
registry for managing and executing transforms\n */\nexport class TransformRegistry {\n private transforms = new Map<string, Transform>();\n\n /**\n * Register a transform\n */\n register(transform: Transform): void {\n this.transforms.set(transform.name, transform);\n }\n\n /**\n * Get a transform by name\n */\n get(name: string): Transform | undefined {\n return this.transforms.get(name);\n }\n\n /**\n * Check if transform exists\n */\n has(name: string): boolean {\n return this.transforms.has(name);\n }\n\n /**\n * Execute a transform by name\n */\n execute(name: string, input: any, options?: any): any {\n const transform = this.get(name);\n if (!transform) {\n throw new Error(`Transform not found: ${name}`);\n }\n\n // Validate options if validator exists\n if (transform.validate && !transform.validate(options)) {\n throw new Error(`Invalid options for transform: ${name}`);\n }\n\n return transform.execute(input, options);\n }\n\n /**\n * List all registered transform names\n */\n list(): string[] {\n return Array.from(this.transforms.keys());\n }\n}\n\n// ============================================================================\n// Format Converters\n// ============================================================================\n\n/**\n * Parse JSONC (JSON with comments) to object\n */\nexport const jsoncTransform: Transform = {\n name: 'jsonc',\n execute(input: string): any {\n if (typeof input !== 'string') {\n return input;\n }\n\n // Strip comments from JSONC\n const stripped = input\n // Remove single-line comments\n .replace(/\\/\\/.*$/gm, '')\n // Remove multi-line comments\n .replace(/\\/\\*[\\s\\S]*?\\*\\//g, '')\n // Remove trailing commas\n .replace(/,(\\s*[}\\]])/g, '$1');\n\n return JSON.parse(stripped);\n },\n};\n\n/**\n * Convert between YAML and object.\n *\n * Bidirectional primitive \u2014 flow authors should prefer the explicit-pair\n * aliases at `$pipe` call sites.\n *\n * @see jsonToYamlTransform\n * @see yamlToJsonTransform\n 
*/\nexport const yamlTransform: Transform = {\n name: 'yaml',\n execute(input: any, options?: { direction?: 'parse' | 'stringify' }): any {\n const direction = options?.direction || 'parse';\n\n if (direction === 'parse') {\n if (typeof input !== 'string') {\n return input;\n }\n return yaml.load(input);\n } else {\n // stringify\n return yaml.dump(input, {\n indent: 2,\n flowLevel: 1, // Use compact flow style for arrays\n lineWidth: -1, // Disable line wrapping\n noRefs: true, // Disable anchors/aliases\n });\n }\n },\n};\n\n/**\n * Convert between TOML and object.\n *\n * Uses smol-toml for TOML v1.0.0 compliant serialization and parsing.\n *\n * Bidirectional primitive \u2014 flow authors should prefer the explicit-pair\n * aliases at `$pipe` call sites.\n *\n * @see jsonToTomlTransform\n * @see tomlToJsonTransform\n */\nexport const tomlTransform: Transform = {\n name: 'toml',\n execute(input: any, options?: { direction?: 'parse' | 'stringify' }): any {\n const direction = options?.direction || 'parse';\n\n if (direction === 'parse') {\n if (typeof input !== 'string') {\n return input;\n }\n try {\n return TOML.parse(input);\n } catch (error) {\n throw new Error(`TOML parse error: ${error instanceof Error ? error.message : String(error)}`);\n }\n } else {\n // stringify\n try {\n return TOML.stringify(input);\n } catch (error) {\n throw new Error(`TOML stringify error: ${error instanceof Error ? 
error.message : String(error)}`);\n }\n }\n },\n};\n\n/**\n * Convert JSON object to TOML string\n */\nexport const jsonToTomlTransform: Transform = {\n name: 'json-to-toml',\n execute(input: any): string {\n return tomlTransform.execute(input, { direction: 'stringify' });\n },\n};\n\n/**\n * Convert TOML string to JSON object\n */\nexport const tomlToJsonTransform: Transform = {\n name: 'toml-to-json',\n execute(input: string): any {\n return tomlTransform.execute(input, { direction: 'parse' });\n },\n};\n\n/**\n * Convert JSON object to YAML string.\n */\nexport const jsonToYamlTransform: Transform = {\n name: 'json-to-yaml',\n execute(input: any): string {\n return yamlTransform.execute(input, { direction: 'stringify' });\n },\n};\n\n/**\n * Convert YAML string to JSON object.\n */\nexport const yamlToJsonTransform: Transform = {\n name: 'yaml-to-json',\n execute(input: string): any {\n return yamlTransform.execute(input, { direction: 'parse' });\n },\n};\n\n// ============================================================================\n// Content Filters\n// ============================================================================\n\n/**\n * Remove comments from JSONC/YAML strings\n */\nexport const filterCommentsTransform: Transform = {\n name: 'filter-comments',\n execute(input: any): any {\n if (typeof input !== 'string') {\n return input;\n }\n\n return input\n .replace(/\\/\\/.*$/gm, '')\n .replace(/\\/\\*[\\s\\S]*?\\*\\//g, '')\n .replace(/^\\s*#.*$/gm, '');\n },\n};\n\n/**\n * Remove empty strings, arrays, and objects\n */\nexport const filterEmptyTransform: Transform = {\n name: 'filter-empty',\n execute(input: any, options?: { recursive?: boolean }): any {\n const recursive = options?.recursive ?? 
true;\n\n if (Array.isArray(input)) {\n const filtered = input\n .filter(item => {\n if (item === '' || (Array.isArray(item) && item.length === 0)) {\n return false;\n }\n if (typeof item === 'object' && item !== null && Object.keys(item).length === 0) {\n return false;\n }\n return true;\n })\n .map(item => recursive && typeof item === 'object' ? filterEmptyTransform.execute(item, options) : item);\n\n return filtered;\n }\n\n if (typeof input === 'object' && input !== null) {\n const filtered: any = {};\n for (const [key, value] of Object.entries(input)) {\n if (value === '' || (Array.isArray(value) && value.length === 0)) {\n continue;\n }\n if (typeof value === 'object' && value !== null && Object.keys(value).length === 0) {\n continue;\n }\n filtered[key] = recursive && typeof value === 'object' ? filterEmptyTransform.execute(value, options) : value;\n }\n return filtered;\n }\n\n return input;\n },\n};\n\n/**\n * Remove null and undefined values\n */\nexport const filterNullTransform: Transform = {\n name: 'filter-null',\n execute(input: any, options?: { recursive?: boolean }): any {\n const recursive = options?.recursive ?? true;\n\n if (Array.isArray(input)) {\n return input\n .filter(item => item !== null && item !== undefined)\n .map(item => recursive && typeof item === 'object' ? filterNullTransform.execute(item, options) : item);\n }\n\n if (typeof input === 'object' && input !== null) {\n const filtered: any = {};\n for (const [key, value] of Object.entries(input)) {\n if (value === null || value === undefined) {\n continue;\n }\n filtered[key] = recursive && typeof value === 'object' ? 
filterNullTransform.execute(value, options) : value;\n }\n return filtered;\n }\n\n return input;\n },\n};\n\n// ============================================================================\n// Markdown Transforms\n// ============================================================================\n\n/**\n * Split markdown by section headers\n */\nexport const sectionsTransform: Transform = {\n name: 'sections',\n execute(input: string, options?: { level?: number }): Record<string, string> {\n if (typeof input !== 'string') {\n return { content: String(input) };\n }\n\n const level = options?.level || 1;\n const headerRegex = new RegExp(`^#{${level}}\\\\s+(.+)$`, 'gm');\n const sections: Record<string, string> = {};\n\n let lastIndex = 0;\n let lastTitle = '_preamble';\n let match;\n\n while ((match = headerRegex.exec(input)) !== null) {\n // Save previous section\n if (lastIndex < match.index) {\n sections[lastTitle] = input.slice(lastIndex, match.index).trim();\n }\n\n // Start new section\n lastTitle = match[1].trim();\n lastIndex = match.index + match[0].length;\n }\n\n // Save final section\n if (lastIndex < input.length) {\n sections[lastTitle] = input.slice(lastIndex).trim();\n }\n\n return sections;\n },\n};\n\n/**\n * Convert between Markdown (with optional YAML frontmatter) and a structured\n * `{ frontmatter?, body }` object.\n *\n * Bidirectional primitive \u2014 flow authors should prefer the explicit-pair\n * aliases at `$pipe` call sites.\n *\n * Strict-by-default on invalid YAML frontmatter (parse direction throws).\n * This differs from {@link frontmatterTransform}, which silently returns\n * `{}` on parse failure \u2014 that helper is for inline extraction in\n * sub-pipelines, not for full document round-trips.\n *\n * @see jsonToMarkdownTransform\n * @see markdownToJsonTransform\n */\nexport const markdownTransform: Transform = {\n name: 'markdown',\n execute(input: any, options?: { direction?: 'parse' | 'stringify' }): any {\n const direction = 
options?.direction || 'parse';\n if (direction === 'parse') {\n if (typeof input !== 'string') {\n return input;\n }\n return parseMarkdownDocument(input);\n }\n return serializeMarkdownDocument(input);\n },\n};\n\n/**\n * Parse Markdown to a `{ frontmatter?, body }` object.\n */\nexport const markdownToJsonTransform: Transform = {\n name: 'markdown-to-json',\n execute(input: string): any {\n return markdownTransform.execute(input, { direction: 'parse' });\n },\n};\n\n/**\n * Serialize a `{ frontmatter?, body }` object back to Markdown.\n */\nexport const jsonToMarkdownTransform: Transform = {\n name: 'json-to-markdown',\n execute(input: any): string {\n return markdownTransform.execute(input, { direction: 'stringify' });\n },\n};\n\n/**\n * Extract YAML frontmatter from markdown\n */\nexport const frontmatterTransform: Transform = {\n name: 'frontmatter',\n execute(input: string): any {\n if (typeof input !== 'string') {\n return {};\n }\n\n const match = input.match(/^---\\s*\\n([\\s\\S]*?)\\n---\\s*\\n/);\n if (!match) {\n return {};\n }\n\n try {\n return yaml.load(match[1]) || {};\n } catch (error) {\n logger.warn(`Failed to parse frontmatter: ${error}`);\n return {};\n }\n },\n};\n\n/**\n * Extract markdown body (without frontmatter)\n */\nexport const bodyTransform: Transform = {\n name: 'body',\n execute(input: string): string {\n if (typeof input !== 'string') {\n return String(input);\n }\n\n const match = input.match(/^---\\s*\\n[\\s\\S]*?\\n---\\s*\\n([\\s\\S]*)$/);\n if (match) {\n return match[1].trim();\n }\n\n return input.trim();\n },\n};\n\n// ============================================================================\n// Value Transforms\n// ============================================================================\n\n/**\n * Type Converters\n */\n\nexport const numberTransform: Transform = {\n name: 'number',\n execute(input: any): number {\n const num = Number(input);\n if (isNaN(num)) {\n throw new Error(`Cannot convert to number: 
${input}`);\n }\n return num;\n },\n};\n\nexport const stringTransform: Transform = {\n name: 'string',\n execute(input: any): string {\n return String(input);\n },\n};\n\nexport const booleanTransform: Transform = {\n name: 'boolean',\n execute(input: any): boolean {\n if (typeof input === 'boolean') return input;\n if (typeof input === 'string') {\n const lower = input.toLowerCase();\n if (lower === 'true' || lower === '1' || lower === 'yes') return true;\n if (lower === 'false' || lower === '0' || lower === 'no') return false;\n }\n return Boolean(input);\n },\n};\n\nexport const jsonTransform: Transform = {\n name: 'json',\n execute(input: any): any {\n if (typeof input === 'string') {\n return JSON.parse(input);\n }\n return input;\n },\n};\n\nexport const dateTransform: Transform = {\n name: 'date',\n execute(input: any): Date {\n const date = new Date(input);\n if (isNaN(date.getTime())) {\n throw new Error(`Cannot convert to date: ${input}`);\n }\n return date;\n },\n};\n\n/**\n * String Transforms\n */\n\nexport const uppercaseTransform: Transform = {\n name: 'uppercase',\n execute(input: any): string {\n return String(input).toUpperCase();\n },\n};\n\nexport const lowercaseTransform: Transform = {\n name: 'lowercase',\n execute(input: any): string {\n return String(input).toLowerCase();\n },\n};\n\nexport const trimTransform: Transform = {\n name: 'trim',\n execute(input: any): string {\n return String(input).trim();\n },\n};\n\nexport const titleCaseTransform: Transform = {\n name: 'title-case',\n execute(input: any): string {\n return String(input)\n .toLowerCase()\n .split(/\\s+/)\n .map(word => word.charAt(0).toUpperCase() + word.slice(1))\n .join(' ');\n },\n};\n\nexport const camelCaseTransform: Transform = {\n name: 'camel-case',\n execute(input: any): string {\n return String(input)\n .replace(/[-_\\s]+(.)?/g, (_, c) => c ? 
c.toUpperCase() : '')\n .replace(/^(.)/, (_, c) => c.toLowerCase());\n },\n};\n\nexport const kebabCaseTransform: Transform = {\n name: 'kebab-case',\n execute(input: any): string {\n return String(input)\n .replace(/([a-z])([A-Z])/g, '$1-$2')\n .replace(/[\\s_]+/g, '-')\n .toLowerCase();\n },\n};\n\nexport const snakeCaseTransform: Transform = {\n name: 'snake-case',\n execute(input: any): string {\n return String(input)\n .replace(/([a-z])([A-Z])/g, '$1_$2')\n .replace(/[\\s-]+/g, '_')\n .toLowerCase();\n },\n};\n\nexport const slugifyTransform: Transform = {\n name: 'slugify',\n execute(input: any): string {\n return String(input)\n .toLowerCase()\n .replace(/[^\\w\\s-]/g, '')\n .replace(/[\\s_]+/g, '-')\n .replace(/^-+|-+$/g, '');\n },\n};\n\n/**\n * Array Transforms\n */\n\nexport const arrayAppendTransform: Transform = {\n name: 'array-append',\n execute(input: any, options?: { value: any }): any[] {\n const arr = Array.isArray(input) ? input : [input];\n return [...arr, options?.value];\n },\n};\n\nexport const arrayUniqueTransform: Transform = {\n name: 'array-unique',\n execute(input: any): any[] {\n if (!Array.isArray(input)) {\n return [input];\n }\n return [...new Set(input)];\n },\n};\n\nexport const arrayFlattenTransform: Transform = {\n name: 'array-flatten',\n execute(input: any, options?: { depth?: number }): any[] {\n if (!Array.isArray(input)) {\n return [input];\n }\n const depth = options?.depth ?? Infinity;\n return input.flat(depth);\n },\n};\n\n/**\n * Object Transforms\n */\n\nexport const flattenTransform: Transform = {\n name: 'flatten',\n execute(input: any, options?: { separator?: string }): Record<string, any> {\n if (typeof input !== 'object' || input === null) {\n return { value: input };\n }\n\n const separator = options?.separator || '.';\n const result: Record<string, any> = {};\n\n function flatten(obj: any, prefix = '') {\n for (const [key, value] of Object.entries(obj)) {\n const newKey = prefix ? 
`${prefix}${separator}${key}` : key;\n \n if (typeof value === 'object' && value !== null && !Array.isArray(value)) {\n flatten(value, newKey);\n } else {\n result[newKey] = value;\n }\n }\n }\n\n flatten(input);\n return result;\n },\n};\n\nexport const unflattenTransform: Transform = {\n name: 'unflatten',\n execute(input: any, options?: { separator?: string }): any {\n if (typeof input !== 'object' || input === null) {\n return input;\n }\n\n const separator = options?.separator || '.';\n const result: any = {};\n\n for (const [path, value] of Object.entries(input)) {\n const keys = path.split(separator);\n let current = result;\n\n for (let i = 0; i < keys.length - 1; i++) {\n const key = keys[i];\n if (!(key in current)) {\n current[key] = {};\n }\n current = current[key];\n }\n\n current[keys[keys.length - 1]] = value;\n }\n\n return result;\n },\n};\n\nexport const pickKeysTransform: Transform = {\n name: 'pick-keys',\n execute(input: any, options?: { keys: string[] }): any {\n if (typeof input !== 'object' || input === null) {\n return input;\n }\n\n const keys = options?.keys || [];\n const result: any = {};\n\n for (const key of keys) {\n if (key in input) {\n result[key] = input[key];\n }\n }\n\n return result;\n },\n validate(options?: { keys: string[] }): boolean {\n return Array.isArray(options?.keys);\n },\n};\n\nexport const omitKeysTransform: Transform = {\n name: 'omit-keys',\n execute(input: any, options?: { keys: string[] }): any {\n if (typeof input !== 'object' || input === null) {\n return input;\n }\n\n const keys = options?.keys || [];\n const result: any = { ...input };\n\n for (const key of keys) {\n delete result[key];\n }\n\n return result;\n },\n validate(options?: { keys: string[] }): boolean {\n return Array.isArray(options?.keys);\n },\n};\n\n// ============================================================================\n// Validation Transforms\n// 
============================================================================\n\nexport const validateTransform: Transform = {\n name: 'validate',\n execute(input: any, options?: { required?: string[] }): any {\n if (typeof input !== 'object' || input === null) {\n throw new Error('Validation failed: input must be an object');\n }\n\n const required = options?.required || [];\n const missing = required.filter(key => !(key in input));\n\n if (missing.length > 0) {\n throw new Error(`Validation failed: missing required keys: ${missing.join(', ')}`);\n }\n\n return input;\n },\n};\n\n// ============================================================================\n// Transform Registry Setup\n// ============================================================================\n\n/**\n * Create and populate default transform registry\n */\nexport function createDefaultTransformRegistry(): TransformRegistry {\n const registry = new TransformRegistry();\n\n // Format converters\n registry.register(jsoncTransform);\n registry.register(yamlTransform);\n registry.register(tomlTransform);\n registry.register(jsonToTomlTransform);\n registry.register(tomlToJsonTransform);\n registry.register(jsonToYamlTransform);\n registry.register(yamlToJsonTransform);\n\n // Content filters\n registry.register(filterCommentsTransform);\n registry.register(filterEmptyTransform);\n registry.register(filterNullTransform);\n\n // Markdown transforms\n registry.register(sectionsTransform);\n registry.register(markdownTransform);\n registry.register(markdownToJsonTransform);\n registry.register(jsonToMarkdownTransform);\n registry.register(frontmatterTransform);\n registry.register(bodyTransform);\n\n // Type converters\n registry.register(numberTransform);\n registry.register(stringTransform);\n registry.register(booleanTransform);\n registry.register(jsonTransform);\n registry.register(dateTransform);\n\n // String transforms\n registry.register(uppercaseTransform);\n 
registry.register(lowercaseTransform);\n registry.register(trimTransform);\n registry.register(titleCaseTransform);\n registry.register(camelCaseTransform);\n registry.register(kebabCaseTransform);\n registry.register(snakeCaseTransform);\n registry.register(slugifyTransform);\n\n // Array transforms\n registry.register(arrayAppendTransform);\n registry.register(arrayUniqueTransform);\n registry.register(arrayFlattenTransform);\n\n // Object transforms\n registry.register(flattenTransform);\n registry.register(unflattenTransform);\n registry.register(pickKeysTransform);\n registry.register(omitKeysTransform);\n\n // Validation\n registry.register(validateTransform);\n\n return registry;\n}\n\n/**\n * Global transform registry instance\n */\nexport const defaultTransformRegistry = createDefaultTransformRegistry();\n", "import yaml from 'js-yaml';\n\nexport type MarkdownDocument = {\n frontmatter?: any;\n body: string;\n};\n\n/**\n * Parse markdown with optional YAML frontmatter.\n *\n * Notes:\n * - When frontmatter is present but invalid YAML:\n * - lenient=false (default): throws\n * - lenient=true: returns { body: originalContent } (treat as plain markdown)\n */\nexport function parseMarkdownDocument(\n content: string,\n options?: { lenient?: boolean }\n): MarkdownDocument {\n const frontmatterRegex = /^---\\s*\\n([\\s\\S]*?)\\n---\\s*\\n([\\s\\S]*)$/;\n const match = content.match(frontmatterRegex);\n\n if (!match) {\n return { body: content };\n }\n\n const [, frontmatterRaw, body] = match;\n\n try {\n const frontmatter = (yaml.load(frontmatterRaw) ?? 
{}) as any;\n return { frontmatter, body };\n } catch (error) {\n if (options?.lenient) {\n return { body: content };\n }\n throw error;\n }\n}\n\n/**\n * Serialize markdown content (optionally with YAML frontmatter).\n *\n * Uses consistent YAML dump settings (flowLevel=1 for compact arrays).\n */\nexport function serializeMarkdownDocument(content: any): string {\n if (typeof content === 'string') {\n return content;\n }\n\n const body = typeof content?.body === 'string' ? content.body : '';\n const hasFrontmatter = content && typeof content === 'object' && 'frontmatter' in content && content.frontmatter;\n\n if (!hasFrontmatter) {\n return body;\n }\n\n const frontmatterYaml = yaml.dump(content.frontmatter, {\n indent: 2,\n flowLevel: -1,\n lineWidth: -1,\n noRefs: true,\n });\n\n // Normalize with a blank line between frontmatter and body (common markdown convention).\n return `---\\n${frontmatterYaml}---\\n\\n${body}`;\n}\n\n", "import { extname, basename, dirname } from 'path';\nimport { stripPlatformSuffixFromFilename } from './flows/platform-suffix-handler.js';\n\nfunction normalizeSlashPath(input: string): string {\n return input.replace(/\\\\/g, '/');\n}\n\nfunction splitSegments(input: string): string[] {\n const normalized = normalizeSlashPath(input);\n return normalized.split('/').filter(Boolean);\n}\n\n/**\n * When mapping a source path into a destination base (e.g. `skills/foo.txt` into `.cursor/skills/**`),\n * avoid duplicating overlapping anchor segments (e.g. 
`.cursor/skills/skills/foo.txt`).\n *\n * This finds the longest overlap where a suffix of `destBase` equals a prefix of `sourceRel`,\n * then strips that prefix from `sourceRel`.\n */\nexport function stripOverlappingDestBaseFromSource(\n destBase: string,\n sourceRelFromPackage: string\n): string {\n const destSegments = splitSegments(destBase);\n const sourceSegments = splitSegments(sourceRelFromPackage);\n\n const maxOverlap = Math.min(destSegments.length, sourceSegments.length);\n let overlapLen = 0;\n\n for (let k = maxOverlap; k >= 1; k--) {\n const destSuffix = destSegments.slice(-k).join('/');\n const sourcePrefix = sourceSegments.slice(0, k).join('/');\n if (destSuffix === sourcePrefix) {\n overlapLen = k;\n break;\n }\n }\n\n return sourceSegments.slice(overlapLen).join('/');\n}\n\nfunction extractTargetExtensionFromRecursiveSuffix(toSuffix: string): string | null {\n const normalized = normalizeSlashPath(toSuffix);\n const match = normalized.match(/(\\.[^/]+)$/);\n return match?.[1] ?? null;\n}\n\nfunction trimSourceToAnchorSegment(\n sourceRelFromPackage: string,\n anchorSegment: string\n): string {\n const sourceSegments = splitSegments(sourceRelFromPackage);\n const anchorIndex = sourceSegments.lastIndexOf(anchorSegment);\n\n if (anchorIndex === -1) {\n return normalizeSlashPath(sourceRelFromPackage);\n }\n\n return sourceSegments.slice(anchorIndex + 1).join('/');\n}\n\n/**\n * Resolve the workspace-relative target path for patterns containing `**`.\n *\n * - Preserves nested subdirectories from the source.\n * - Prevents accidental duplication when the destination base overlaps the source prefix.\n * - Supports basic extension remapping (e.g. `** / *.md` -> `** / *.mdc`).\n * - Strips platform suffixes from filenames (e.g. 
`read-specs.claude.md` -> `read-specs.md`).\n */\nexport function resolveRecursiveGlobTargetRelativePath(\n sourceRelFromPackage: string,\n fromPattern: string,\n toPattern: string\n): string {\n const toParts = normalizeSlashPath(toPattern).split('**');\n const toBase = toParts[0].replace(/\\/$/, '');\n const toSuffix = toParts[1] || '';\n\n let relativeSubpath = sourceRelFromPackage;\n\n if (fromPattern.includes('**')) {\n const fromParts = normalizeSlashPath(fromPattern).split('**');\n const fromBase = fromParts[0].replace(/\\/$/, '');\n const fromSuffix = fromParts[1] || '';\n\n if (fromBase) {\n const normalizedSource = normalizeSlashPath(sourceRelFromPackage);\n relativeSubpath = normalizedSource.startsWith(fromBase + '/')\n ? normalizedSource.slice(fromBase.length + 1)\n : normalizedSource;\n } else if (toBase) {\n const toBaseSegments = splitSegments(toBase);\n const anchorSegment = toBaseSegments[toBaseSegments.length - 1];\n\n if (anchorSegment) {\n relativeSubpath = trimSourceToAnchorSegment(sourceRelFromPackage, anchorSegment);\n }\n }\n\n // Handle extension mapping if suffixes specify extensions: /**/*.md -> /**/*.mdc\n if (fromSuffix && toSuffix) {\n const fromExt = fromSuffix.replace(/^\\/?\\*+/, '');\n const toExt = toSuffix.replace(/^\\/?\\*+/, '');\n if (fromExt && toExt && fromExt !== toExt) {\n relativeSubpath = relativeSubpath.replace(\n new RegExp(fromExt.replace('.', '\\\\.') + '$'),\n toExt\n );\n }\n }\n } else {\n // We don't have a recursive `fromPattern`, so preserve the full source path but avoid\n // duplicating any overlapping \"anchor\" segments with the destination base.\n relativeSubpath = toBase\n ? 
stripOverlappingDestBaseFromSource(toBase, sourceRelFromPackage)\n : normalizeSlashPath(sourceRelFromPackage);\n\n const toExt = extractTargetExtensionFromRecursiveSuffix(toSuffix);\n if (toExt && toExt.startsWith('.')) {\n const currentExt = extname(relativeSubpath);\n if (currentExt && currentExt !== toExt) {\n relativeSubpath = relativeSubpath.slice(0, -currentExt.length) + toExt;\n }\n }\n }\n\n // Strip platform suffix from filename (e.g. read-specs.claude.md -> read-specs.md)\n // This must be done before constructing the final path\n relativeSubpath = stripPlatformSuffixFromFilename(relativeSubpath);\n\n return toBase ? normalizeSlashPath(`${toBase}/${relativeSubpath}`) : normalizeSlashPath(relativeSubpath);\n}\n", "/**\n * Format Group Merger Module\n * \n * Merges converted format groups into a unified package structure.\n * Handles path conflicts and deduplication.\n * \n * Phase 3: Per-File Import Flow Application\n */\n\nimport { logger } from '../../utils/logger.js';\nimport type { PackageFile, PlatformId, SpecialFormat } from './detection-types.js';\n\n/**\n * Validation result for merged package\n */\nexport interface ValidationResult {\n /** Whether validation passed */\n valid: boolean;\n \n /** Validation errors (if any) */\n errors: string[];\n \n /** Validation warnings (if any) */\n warnings: string[];\n}\n\n/**\n * Merge format groups into unified package\n * \n * Combines all converted format groups into a single array of files in universal format.\n * Handles path conflicts using priority-based deduplication.\n * \n * @param groups - Map of platform ID -> converted files\n * @returns Unified array of files in universal format\n */\nexport function mergeFormatGroups(\n groups: Map<PlatformId | SpecialFormat, PackageFile[]>\n): PackageFile[] {\n // Collect all files from all groups\n const allFiles: PackageFile[] = [];\n \n for (const [platformId, files] of groups) {\n allFiles.push(...files);\n }\n \n // Deduplicate paths with priority-based 
resolution\n const deduplicated = deduplicatePaths(allFiles);\n \n return deduplicated;\n}\n\n/**\n * Deduplicate files by path using priority ordering\n * \n * Priority (highest to lowest):\n * 1. Universal format (most canonical)\n * 2. Non-universal formats (first occurrence wins)\n * \n * @param files - Array of files (potentially with duplicate paths)\n * @returns Deduplicated array\n */\nexport function deduplicatePaths(files: PackageFile[]): PackageFile[] {\n const pathMap = new Map<string, PackageFile>();\n \n for (const file of files) {\n const existing = pathMap.get(file.path);\n \n if (!existing) {\n // First occurrence - add to map\n pathMap.set(file.path, file);\n continue;\n }\n \n // Duplicate path - apply priority rules\n const priority = determinePriority(file, existing);\n \n if (priority === 'new') {\n pathMap.set(file.path, file);\n }\n }\n \n return Array.from(pathMap.values());\n}\n\n/**\n * Determine which file has priority for the same path\n * \n * @param newFile - New file being considered\n * @param existingFile - Existing file in the map\n * @returns Which file to keep ('new' or 'existing')\n */\nfunction determinePriority(\n newFile: PackageFile,\n existingFile: PackageFile\n): 'new' | 'existing' {\n // Priority 1: Universal format content\n // Check if files have universal format indicators\n \n const newIsUniversal = isLikelyUniversalFormat(newFile);\n const existingIsUniversal = isLikelyUniversalFormat(existingFile);\n \n if (newIsUniversal && !existingIsUniversal) {\n return 'new';\n }\n \n if (existingIsUniversal && !newIsUniversal) {\n return 'existing';\n }\n \n // Priority 2: First occurrence wins (existing)\n return 'existing';\n}\n\n/**\n * Check if file is likely in universal format\n * \n * Heuristic check based on frontmatter structure.\n * Universal format uses:\n * - tools: array\n * - permissions: object\n * - model: prefixed (anthropic/...)\n * \n * @param file - File to check\n * @returns Whether file is likely 
universal format\n */\nfunction isLikelyUniversalFormat(file: PackageFile): boolean {\n if (!file.frontmatter) {\n // No frontmatter - could be universal (e.g., skills)\n return true;\n }\n \n const fm = file.frontmatter;\n \n // Check tools field\n if ('tools' in fm) {\n // Universal uses array\n if (Array.isArray(fm.tools)) {\n return true;\n }\n // Platform-specific uses string or object\n return false;\n }\n \n // Check permissions field\n if ('permissions' in fm) {\n // Universal uses object\n if (typeof fm.permissions === 'object' && fm.permissions !== null) {\n return true;\n }\n }\n \n // Check for platform-specific exclusive fields\n const platformExclusiveFields = [\n 'permissionMode', // Claude\n 'hooks', // Claude\n 'skills', // Claude\n 'temperature', // OpenCode\n 'maxSteps', // OpenCode\n 'disabled' // OpenCode\n ];\n \n for (const field of platformExclusiveFields) {\n if (field in fm) {\n // Has platform-specific field - not universal\n return false;\n }\n }\n \n // No clear indicators - assume universal\n return true;\n}\n\n/**\n * Validate merged package structure\n * \n * Checks for:\n * - No duplicate paths remaining\n * - All files have content or frontmatter\n * - Paths are valid\n * \n * @param files - Merged file array\n * @returns Validation result\n */\nexport function validateMergedPackage(files: PackageFile[]): ValidationResult {\n const errors: string[] = [];\n const warnings: string[] = [];\n \n // Check for duplicates\n const paths = new Set<string>();\n for (const file of files) {\n if (paths.has(file.path)) {\n errors.push(`Duplicate path after merge: ${file.path}`);\n }\n paths.add(file.path);\n }\n \n // Check file validity\n for (const file of files) {\n // Check path is non-empty\n if (!file.path || file.path.trim() === '') {\n errors.push('File with empty path found');\n continue;\n }\n \n // Check file has either content or frontmatter\n if (!file.content && !file.frontmatter) {\n warnings.push(`File ${file.path} has no 
content or frontmatter`);\n }\n \n // Check for absolute paths (should be relative)\n if (file.path.startsWith('/')) {\n warnings.push(`File has absolute path: ${file.path}`);\n }\n }\n \n const valid = errors.length === 0;\n \n if (!valid) {\n logger.error('Merged package validation failed', { errors, warnings });\n } else if (warnings.length > 0) {\n logger.warn('Merged package validation warnings', { warnings });\n }\n \n return {\n valid,\n errors,\n warnings\n };\n}\n\n/**\n * Get statistics about merged package\n * \n * @param files - Merged file array\n * @returns Package statistics\n */\nexport function getMergedPackageStats(files: PackageFile[]): {\n totalFiles: number;\n filesWithFrontmatter: number;\n filesWithContent: number;\n uniquePaths: number;\n} {\n const paths = new Set<string>();\n let filesWithFrontmatter = 0;\n let filesWithContent = 0;\n \n for (const file of files) {\n paths.add(file.path);\n \n if (file.frontmatter && Object.keys(file.frontmatter).length > 0) {\n filesWithFrontmatter++;\n }\n \n if (file.content && file.content.trim().length > 0) {\n filesWithContent++;\n }\n }\n \n return {\n totalFiles: files.length,\n filesWithFrontmatter,\n filesWithContent,\n uniquePaths: paths.size\n };\n}\n", "/**\n * Conversion Context Module\n * \n * Tracks conversion state and metadata during per-file/per-group conversion.\n * Provides context for conversion operations and error tracking.\n * \n * Phase 3: Per-File Import Flow Application\n */\n\nimport { logger } from '../../utils/logger.js';\nimport type { Flow } from '../../types/flows.js';\nimport type { \n PackageFile,\n PlatformId,\n SpecialFormat,\n FormatGroup\n} from './detection-types.js';\n\n/**\n * Conversion context for tracking state during conversion\n * \n * Maintains:\n * - Original format groups from detection\n * - Conversion results per group\n * - Errors per file\n * - Cached import flows per platform\n * - Overall statistics\n */\nexport interface ConversionContext {\n /**\n 
* Original format groups (keyed by dynamic platform ID)\n * Input to conversion process\n */\n formatGroups: Map<PlatformId | SpecialFormat, PackageFile[]>;\n \n /**\n * Conversion results per group (keyed by dynamic platform ID)\n * Output from conversion process\n */\n convertedGroups: Map<PlatformId | SpecialFormat, PackageFile[]>;\n \n /**\n * Conversion errors per file path\n * Tracks which files failed and why\n */\n errors: Map<string, Error>;\n \n /**\n * Conversion metadata and statistics\n */\n metadata: ConversionMetadata;\n \n /**\n * Platform import flows cache (keyed by dynamic platform ID)\n * Loaded flows cached for reuse across files\n */\n importFlowsCache: Map<PlatformId, Flow[]>;\n}\n\n/**\n * Conversion metadata and statistics\n */\nexport interface ConversionMetadata {\n /** Total files in package */\n totalFiles: number;\n \n /** Files successfully converted */\n convertedFiles: number;\n \n /** Files skipped (already universal or no flows) */\n skippedFiles: number;\n \n /** Files that failed conversion */\n failedFiles: number;\n \n /** Start time of conversion */\n startTime: number;\n \n /** End time of conversion (if complete) */\n endTime?: number;\n \n /** Duration in milliseconds (if complete) */\n durationMs?: number;\n}\n\n/**\n * Create a new conversion context from format groups\n * \n * @param formatGroups - Format groups from detection phase\n * @returns Initialized conversion context\n */\nexport function createConversionContext(\n formatGroups: Map<PlatformId | SpecialFormat, PackageFile[]>\n): ConversionContext {\n // Count total files\n const totalFiles = Array.from(formatGroups.values())\n .reduce((sum, files) => sum + files.length, 0);\n \n return {\n formatGroups,\n convertedGroups: new Map(),\n errors: new Map(),\n metadata: {\n totalFiles,\n convertedFiles: 0,\n skippedFiles: 0,\n failedFiles: 0,\n startTime: Date.now()\n },\n importFlowsCache: new Map()\n };\n}\n\n/**\n * Record successful conversion for a group\n * \n 
* @param context - Conversion context to update\n * @param platformId - Platform ID of converted group\n * @param convertedFiles - Converted files\n * @param filesConverted - Number of files successfully converted\n * @param filesSkipped - Number of files skipped\n */\nexport function recordGroupConversion(\n context: ConversionContext,\n platformId: PlatformId | SpecialFormat,\n convertedFiles: PackageFile[],\n filesConverted: number,\n filesSkipped: number\n): void {\n context.convertedGroups.set(platformId, convertedFiles);\n context.metadata.convertedFiles += filesConverted;\n context.metadata.skippedFiles += filesSkipped;\n}\n\n/**\n * Record conversion error for a file\n * \n * @param context - Conversion context to update\n * @param filePath - Path of file that failed\n * @param error - Error that occurred\n */\nexport function recordConversionError(\n context: ConversionContext,\n filePath: string,\n error: Error\n): void {\n context.errors.set(filePath, error);\n context.metadata.failedFiles++;\n \n logger.debug(`Recorded conversion error for ${filePath}`, error);\n}\n\n/**\n * Mark conversion as complete\n * \n * Updates metadata with end time and duration.\n * \n * @param context - Conversion context to finalize\n */\nexport function finalizeConversion(context: ConversionContext): void {\n context.metadata.endTime = Date.now();\n context.metadata.durationMs = context.metadata.endTime - context.metadata.startTime;\n}\n\n/**\n * Get conversion summary\n * \n * @param context - Conversion context\n * @returns Human-readable summary\n */\nexport function getConversionSummary(context: ConversionContext): string {\n const { totalFiles, convertedFiles, skippedFiles, failedFiles, durationMs } = context.metadata;\n \n const parts: string[] = [];\n parts.push(`Total: ${totalFiles} files`);\n parts.push(`Converted: ${convertedFiles}`);\n parts.push(`Skipped: ${skippedFiles}`);\n \n if (failedFiles > 0) {\n parts.push(`Failed: ${failedFiles}`);\n }\n \n if 
(durationMs !== undefined) {\n parts.push(`Duration: ${durationMs}ms`);\n }\n \n return parts.join(', ');\n}\n\n/**\n * Check if conversion was successful\n * \n * @param context - Conversion context\n * @returns Whether all files were converted successfully\n */\nexport function isConversionSuccessful(context: ConversionContext): boolean {\n return context.metadata.failedFiles === 0;\n}\n\n/**\n * Get all conversion errors\n * \n * @param context - Conversion context\n * @returns Array of [filePath, error] tuples\n */\nexport function getConversionErrors(\n context: ConversionContext\n): Array<[string, Error]> {\n return Array.from(context.errors.entries());\n}\n\n/**\n * Cache import flows for a platform\n * \n * @param context - Conversion context\n * @param platformId - Platform ID\n * @param flows - Import flows to cache\n */\nexport function cacheImportFlows(\n context: ConversionContext,\n platformId: PlatformId,\n flows: Flow[]\n): void {\n context.importFlowsCache.set(platformId, flows);\n}\n\n/**\n * Get cached import flows for a platform\n * \n * @param context - Conversion context\n * @param platformId - Platform ID\n * @returns Cached flows or null if not cached\n */\nexport function getCachedImportFlows(\n context: ConversionContext,\n platformId: PlatformId\n): Flow[] | null {\n return context.importFlowsCache.get(platformId) || null;\n}\n\n/**\n * Create format groups from enhanced package format\n * \n * Helper to convert from detection result to format groups structure.\n * \n * @param files - All package files\n * @param formatGroups - Map of platform -> file paths from detection\n * @returns Map of platform -> files\n */\nexport function createFormatGroupsFromPaths(\n files: PackageFile[],\n formatGroups: Map<PlatformId | SpecialFormat, string[]>\n): Map<PlatformId | SpecialFormat, PackageFile[]> {\n const result = new Map<PlatformId | SpecialFormat, PackageFile[]>();\n \n // Create file lookup map\n const fileMap = new Map<string, 
PackageFile>();\n for (const file of files) {\n fileMap.set(file.path, file);\n }\n \n // Build format groups with actual file objects\n for (const [platformId, filePaths] of formatGroups) {\n const groupFiles: PackageFile[] = [];\n \n for (const filePath of filePaths) {\n const file = fileMap.get(filePath);\n if (file) {\n groupFiles.push(file);\n } else {\n logger.warn(`File not found in package: ${filePath}`);\n }\n }\n \n if (groupFiles.length > 0) {\n result.set(platformId, groupFiles);\n }\n }\n \n return result;\n}\n", "/**\n * Conversion Coordinator Module\n * \n * Orchestrates format detection and pre-conversion for packages.\n * Integrates Phase 2 detection and Phase 3 conversion into the installation pipeline.\n * \n * Phase 4: Integration with Existing Pipeline\n */\n\nimport { logger } from '../../utils/logger.js';\nimport { detectEnhancedPackageFormat } from './format-detector.js';\nimport { convertFormatGroup } from './import-flow-converter.js';\nimport { mergeFormatGroups } from './format-group-merger.js';\nimport { \n createConversionContext, \n recordGroupConversion, \n recordConversionError,\n finalizeConversion,\n type ConversionContext \n} from './conversion-context.js';\nimport type { \n PackageFile as DetectionPackageFile,\n EnhancedPackageFormat,\n FormatGroup,\n PlatformId,\n SpecialFormat\n} from './detection-types.js';\nimport type { InstallOptions, PackageFile } from '../../types/index.js';\n\n/**\n * Result of package conversion coordination\n */\nexport interface PackageConversionResult {\n /**\n * Whether package was converted\n * False if package was already universal or no conversion needed\n */\n wasConverted: boolean;\n \n /**\n * Enhanced format detection result\n */\n formatDetection: EnhancedPackageFormat;\n \n /**\n * Converted files (universal format)\n * If not converted, contains original files\n */\n files: PackageFile[];\n \n /**\n * Conversion context (if conversion occurred)\n */\n conversionContext?: 
ConversionContext;\n \n /**\n * Conversion errors (if any)\n */\n errors: Error[];\n \n /**\n * Warnings (non-fatal issues)\n */\n warnings: string[];\n}\n\n/**\n * Options for conversion coordination\n */\nexport interface ConversionOptions {\n /**\n * Target directory for platform config loading\n */\n targetDir?: string;\n \n /**\n * Force conversion even if already universal\n */\n forceConversion?: boolean;\n \n /**\n * Skip conversion entirely (passthrough)\n */\n skipConversion?: boolean;\n}\n\n/**\n * Coordinate format detection and pre-conversion for a package\n * \n * This is the main entry point for Phase 4 integration.\n * \n * Flow:\n * 1. Detect package format (Tier 1: markers, Tier 2: per-file)\n * 2. Determine if conversion is needed\n * 3. If needed, apply per-group conversion\n * 4. Merge converted groups\n * 5. Return unified result\n * \n * @param files - Package files to process\n * @param contentRoot - Package content root path\n * @param options - Conversion options\n * @returns Conversion result with converted files\n */\nexport async function coordinateConversion(\n files: DetectionPackageFile[],\n contentRoot: string,\n options: ConversionOptions = {}\n): Promise<PackageConversionResult> {\n const errors: Error[] = [];\n const warnings: string[] = [];\n \n try {\n // Skip conversion if requested\n if (options.skipConversion) {\n return {\n wasConverted: false,\n formatDetection: createSkipDetectionResult(files),\n files: convertToMainPackageFiles(files),\n errors: [],\n warnings: []\n };\n }\n \n // Step 1: Enhanced format detection (Phase 2)\n // Cast to main PackageFile type since detection uses its own PackageFile interface\n const formatDetection = await detectEnhancedPackageFormat(files as any);\n \n logger.info('Format detection complete', {\n packageFormat: formatDetection.packageFormat,\n detectionMethod: formatDetection.detectionMethod,\n confidence: formatDetection.confidence,\n totalFiles: formatDetection.analysis.totalFiles\n 
});\n \n // Step 2: Determine if conversion is needed\n const needsConversion = shouldPreConvert(formatDetection, options);\n \n if (!needsConversion) {\n // Still convert platform-specific groups when package is mostly universal\n // (e.g. agents/foo.opencode.md in otherwise-universal package)\n const hasPlatformSpecificGroups = (formatDetection.formatGroups?.size ?? 0) > 0 &&\n Array.from(formatDetection.formatGroups?.keys() ?? []).some(\n k => k !== 'universal' && k !== 'unknown'\n );\n if (!hasPlatformSpecificGroups) {\n return {\n wasConverted: false,\n formatDetection,\n files: convertToMainPackageFiles(files),\n errors: [],\n warnings: []\n };\n }\n }\n \n // Step 3: Perform pre-conversion (Phase 3)\n logger.info('Pre-converting package to universal format', {\n packageFormat: formatDetection.packageFormat,\n groupCount: formatDetection.formatGroups?.size || 0\n });\n \n const conversionResult = await preConvertPackage(\n files,\n formatDetection,\n options.targetDir\n );\n \n return {\n wasConverted: true,\n formatDetection,\n files: convertToMainPackageFiles(conversionResult.files),\n conversionContext: conversionResult.context,\n errors: conversionResult.errors,\n warnings: conversionResult.warnings\n };\n \n } catch (error) {\n logger.error('Conversion coordination failed', { error });\n errors.push(error instanceof Error ? 
error : new Error(String(error)));\n \n // Return original files on error (graceful degradation)\n return {\n wasConverted: false,\n formatDetection: createErrorDetectionResult(files, error),\n files: convertToMainPackageFiles(files),\n errors,\n warnings\n };\n }\n}\n\n\n\n/**\n * Determine if package needs pre-conversion\n * \n * Conversion needed if:\n * - Package format is platform-specific (not universal)\n * - Package format is mixed (multiple platforms)\n * - Force conversion is enabled\n * \n * @param format - Enhanced format detection result\n * @param options - Conversion options\n * @returns Whether conversion is needed\n */\nexport function shouldPreConvert(\n format: EnhancedPackageFormat,\n options: ConversionOptions = {}\n): boolean {\n // Force conversion if requested\n if (options.forceConversion) {\n return true;\n }\n \n // Skip if already universal\n if (format.packageFormat === 'universal') {\n return false;\n }\n \n // Convert if platform-specific or mixed\n if (format.packageFormat !== 'unknown') {\n return true;\n }\n \n // Unknown format - don't convert (let existing flow handle it)\n return false;\n}\n\n/**\n * Pre-convert package to universal format (Phase 3)\n * \n * Applies import flows per format group and merges results.\n */\nasync function preConvertPackage(\n files: DetectionPackageFile[],\n formatDetection: EnhancedPackageFormat,\n targetDir?: string\n): Promise<{\n files: DetectionPackageFile[];\n context: ConversionContext;\n errors: Error[];\n warnings: string[];\n}> {\n const errors: Error[] = [];\n const warnings: string[] = [];\n \n // Get format groups from detection (paths only)\n const formatGroupPaths = formatDetection.formatGroups;\n \n if (!formatGroupPaths || formatGroupPaths.size === 0) {\n logger.warn('No format groups found for conversion');\n return {\n files,\n context: createConversionContext(new Map()),\n errors: [],\n warnings: ['No format groups found for conversion']\n };\n }\n \n // Create file lookup map 
for efficient access\n const fileMap = new Map<string, DetectionPackageFile>();\n for (const file of files) {\n fileMap.set(file.path, file);\n }\n \n // Convert path groups to PackageFile groups\n const formatGroups = new Map<PlatformId | SpecialFormat, DetectionPackageFile[]>();\n for (const [platformId, paths] of formatGroupPaths.entries()) {\n const groupFiles: DetectionPackageFile[] = [];\n for (const path of paths) {\n const file = fileMap.get(path);\n if (file) {\n groupFiles.push(file);\n } else {\n logger.warn(`File not found in file map: ${path}`);\n }\n }\n if (groupFiles.length > 0) {\n formatGroups.set(platformId, groupFiles);\n }\n }\n \n // Create conversion context\n const conversionContext = createConversionContext(formatGroups);\n \n // Convert each format group\n const convertedGroups = new Map<PlatformId | SpecialFormat, PackageFile[]>();\n\n for (const [platformId, groupFiles] of formatGroups.entries()) {\n try {\n // Create format group object\n const formatGroup: FormatGroup = {\n platformId,\n files: groupFiles,\n confidence: 1.0 // Use detection confidence if available\n };\n \n // Convert group using Phase 3 converter\n const groupResult = convertFormatGroup(formatGroup, targetDir);\n \n // Record conversion in context\n // Type cast needed since detection-types and main types are structurally compatible but nominally different\n recordGroupConversion(\n conversionContext,\n platformId,\n groupResult.convertedFiles as any,\n groupResult.filesConverted,\n groupResult.filesProcessed - groupResult.filesConverted\n );\n \n // Collect errors\n for (const fileResult of groupResult.fileResults) {\n if (!fileResult.success && fileResult.error) {\n recordConversionError(\n conversionContext,\n fileResult.original.path,\n fileResult.error\n );\n errors.push(fileResult.error);\n }\n }\n \n // Add to converted groups\n // Type cast needed since detection-types and main types are structurally compatible but nominally different\n 
convertedGroups.set(platformId, groupResult.convertedFiles as any);\n \n } catch (error) {\n logger.error(`Failed to convert format group: ${platformId}`, { error });\n const err = error instanceof Error ? error : new Error(String(error));\n errors.push(err);\n warnings.push(`Failed to convert ${platformId} format group: ${err.message}`);\n \n // Record error for all files in group\n for (const file of groupFiles) {\n recordConversionError(conversionContext, file.path, err);\n }\n }\n }\n \n // Merge converted groups (Phase 3)\n const mergedFiles = mergeFormatGroups(convertedGroups);\n \n // Finalize conversion context\n finalizeConversion(conversionContext);\n \n logger.info('Pre-conversion complete', {\n totalFiles: mergedFiles.length,\n convertedFiles: conversionContext.metadata.convertedFiles,\n failedFiles: conversionContext.metadata.failedFiles,\n durationMs: conversionContext.metadata.durationMs\n });\n \n // Convert detection PackageFile to main PackageFile (ensure content is present)\n const convertedFiles: PackageFile[] = mergedFiles.map(f => ({\n path: f.path,\n content: f.content || '', // Should always be present, but fallback to empty string\n ...(f.frontmatter && { frontmatter: f.frontmatter })\n })) as PackageFile[];\n \n return {\n files: convertedFiles,\n context: conversionContext,\n errors,\n warnings\n };\n}\n\n/**\n * Convert detection PackageFile array to main PackageFile array\n * Ensures all files have content property\n */\nfunction convertToMainPackageFiles(files: DetectionPackageFile[]): PackageFile[] {\n return files.map(f => ({\n path: f.path,\n content: f.content || ''\n }));\n}\n\n/**\n * Create a detection result for skipped conversion\n */\nfunction createSkipDetectionResult(files: DetectionPackageFile[]): EnhancedPackageFormat {\n return {\n packageFormat: 'universal',\n detectionMethod: 'package-marker',\n confidence: 1.0,\n analysis: {\n totalFiles: files.length,\n analyzedFiles: 0,\n skippedFiles: files.length,\n 
formatDistribution: new Map([['universal', files.length]])\n }\n };\n}\n\n/**\n * Create a detection result for error case\n */\nfunction createErrorDetectionResult(\n files: DetectionPackageFile[],\n error: unknown\n): EnhancedPackageFormat {\n logger.error('Detection failed, returning unknown format', { error });\n \n return {\n packageFormat: 'unknown',\n detectionMethod: 'package-marker',\n confidence: 0,\n analysis: {\n totalFiles: files.length,\n analyzedFiles: 0,\n skippedFiles: files.length,\n formatDistribution: new Map([['unknown', files.length]])\n }\n };\n}\n", "/**\n * Convert Phase\n * \n * Performs format detection and pre-conversion of packages.\n * Integrates Phase 2 detection and Phase 3 conversion into the pipeline.\n * \n * Phase 4: Integration with Existing Pipeline\n */\n\nimport type { InstallationContext } from '../context.js';\nimport { coordinateConversion } from '../../conversion-coordinator.js';\nimport { addWarning } from '../context-helpers.js';\nimport { logger } from '../../../../utils/logger.js';\nimport { readdir } from 'fs/promises';\nimport { join } from 'path';\nimport { readFile } from 'fs/promises';\nimport type { PackageFile as DetectionPackageFile } from '../../detection-types.js';\nimport { getPlatformDefinitions, matchesUniversalPattern } from '../../../platforms.js';\nimport { getPatternFromFlow } from '../../schema-registry.js';\nimport { matchPackagePath } from '../../../../utils/match-path.js';\nimport { getRelativePathFromBase } from '../../../../utils/path-normalization.js';\nimport { \n createTempPackageDirectory, \n writeTempPackageFiles,\n createConversionCacheDirectory,\n cleanupTempDirectory,\n cleanupStaleScopeDirs,\n type ConversionCacheScope\n} from '../../strategies/helpers/temp-directory.js';\n\n/**\n * Convert phase - detect format and pre-convert if needed\n * \n * This phase:\n * 1. Loads all files from content root\n * 2. Runs format detection (Tier 1 + Tier 2)\n * 3. 
Pre-converts if platform-specific format detected\n * 4. Updates context with conversion metadata\n * \n * @param ctx - Installation context\n */\nexport async function convertPhase(ctx: InstallationContext): Promise<void> {\n // Skip if no content root (shouldn't happen after load phase)\n if (!ctx.source.contentRoot) {\n logger.warn('No content root, skipping convert phase');\n return;\n }\n \n // Skip if package is a marketplace (will be handled by marketplace flow)\n if (ctx.source.pluginMetadata?.pluginType === 'marketplace') {\n return;\n }\n\n // Skip conversion for subset installs (individual resource selections via --agents, etc.).\n // These are single-file or single-directory resources where format conversion is unnecessary\n // and would break the matchedPattern by redirecting contentRoot to a temp directory.\n if ((ctx as { installScope?: string }).installScope === 'subset') {\n return;\n }\n \n try {\n // Load package files\n const files = await loadPackageFiles(ctx.source.contentRoot, {\n targetDir: ctx.targetDir,\n matchedPattern: ctx.matchedPattern\n });\n\n if (files.length === 0) {\n logger.warn('No files found in package, skipping conversion');\n return;\n }\n\n // Coordinate conversion (detection + conversion if needed)\n const conversionResult = await coordinateConversion(\n files,\n ctx.source.contentRoot,\n {\n targetDir: ctx.targetDir,\n skipConversion: false\n }\n );\n \n // Update context with conversion metadata\n ctx.formatDetection = conversionResult.formatDetection;\n ctx.wasPreConverted = conversionResult.wasConverted;\n \n if (conversionResult.errors.length > 0) {\n ctx.conversionErrors = conversionResult.errors;\n logger.warn('Conversion had errors', {\n errorCount: conversionResult.errors.length\n });\n }\n \n // Add warnings to context\n for (const warning of conversionResult.warnings) {\n addWarning(ctx, warning);\n }\n \n // Log conversion results\n if (conversionResult.wasConverted) {\n logger.info('Package pre-converted to 
universal format', {\n packageName: ctx.source.packageName,\n originalFormat: conversionResult.formatDetection.packageFormat,\n fileCount: conversionResult.files.length,\n detectionMethod: conversionResult.formatDetection.detectionMethod\n });\n\n // Store original contentRoot before any modifications\n const originalContentRoot = ctx.source.contentRoot;\n \n // Determine conversion strategy based on source location\n const isGitCache = originalContentRoot.includes('/.openpackage/cache/git/') || \n originalContentRoot.includes('.openpackage/cache/git/');\n \n let conversionRoot: string;\n let shouldCleanup = false;\n \n if (isGitCache) {\n // Git cache: Store converted files in scope-isolated .opkg-converted subdirectory.\n // Extract the git cache root (without any subdirectory)\n const gitCacheMatch = originalContentRoot.match(/(.+\\.openpackage\\/cache\\/git\\/[^\\/]+\\/[^\\/]+)/);\n const gitCacheRoot = gitCacheMatch ? gitCacheMatch[1] : originalContentRoot;\n \n // Determine the cache scope from the explicit installScope field.\n // This replaces the old string-heuristic check on matchedPattern.\n const cacheScope: ConversionCacheScope = ctx.installScope === 'subset' && ctx.matchedPattern\n ? 
{ type: 'subset', pattern: ctx.matchedPattern }\n : { type: 'full' };\n\n // Always wipe the target scope directory before writing.\n // Each scope gets its own isolated directory, so wiping is safe and prevents\n // stale files from prior installs of the same scope from surviving.\n const scopedCacheDir = await createConversionCacheDirectory(gitCacheRoot, cacheScope);\n await cleanupTempDirectory(scopedCacheDir);\n\n // On full installs, also clean up any stale subset scope directories.\n // The full cache supersedes all subset caches.\n if (cacheScope.type === 'full') {\n await cleanupStaleScopeDirs(gitCacheRoot, cacheScope);\n }\n\n // Recreate the scope directory and write converted files\n conversionRoot = await createConversionCacheDirectory(gitCacheRoot, cacheScope);\n await writeTempPackageFiles(conversionResult.files, conversionRoot);\n shouldCleanup = false;\n logger.info(cacheScope.type === 'subset'\n ? 'Created isolated conversion cache for resource-scoped install'\n : 'Created fresh conversion cache for full install', {\n conversionRoot,\n scope: cacheScope.type,\n fileCount: conversionResult.files.length\n });\n } else {\n // Local path or other source: Use temporary directory\n conversionRoot = await createTempPackageDirectory('opkg-preconverted-');\n await writeTempPackageFiles(conversionResult.files, conversionRoot);\n shouldCleanup = true; // Cleanup temp directory after install\n logger.info('Created temporary conversion directory', { conversionRoot });\n }\n\n // Track temp dir for cleanup in the pipeline (only for non-git-cache)\n if (shouldCleanup) {\n ctx._tempConversionRoot = conversionRoot;\n }\n \n // Store original content root for index writing\n ctx._originalContentRoot = originalContentRoot;\n\n // Update content roots so installation uses converted files\n ctx.source.contentRoot = conversionRoot;\n const rootPkg = ctx.resolvedPackages.find((p: any) => p.isRoot);\n if (rootPkg) {\n rootPkg.contentRoot = conversionRoot;\n // Store 
original for index writing\n (rootPkg as any).originalContentRoot = originalContentRoot;\n }\n \n // Store converted files in package metadata for downstream use\n if (ctx.resolvedPackages.length > 0) {\n const rootPackage = ctx.resolvedPackages[0];\n if (rootPackage.pkg) {\n // Store converted files in package\n rootPackage.pkg.files = conversionResult.files;\n \n // Mark as converted in metadata\n if (!rootPackage.pkg.metadata) {\n rootPackage.pkg.metadata = {} as any;\n }\n (rootPackage.pkg.metadata as any)._wasConverted = true;\n (rootPackage.pkg.metadata as any)._originalFormat = conversionResult.formatDetection.packageFormat;\n }\n }\n }\n \n } catch (error) {\n // Conversion errors are non-fatal - log and continue\n logger.error('Convert phase failed', { error });\n addWarning(\n ctx,\n `Format conversion failed: ${error instanceof Error ? error.message : String(error)}`\n );\n \n // Store error in context\n if (!ctx.conversionErrors) {\n ctx.conversionErrors = [];\n }\n ctx.conversionErrors.push(\n error instanceof Error ? error : new Error(String(error))\n );\n }\n}\n\n/**\n * Load all package files from content root\n * \n * Recursively walks directory tree and loads all files.\n * \n * @param contentRoot - Package content root path\n * @returns Array of package files with paths and content\n */\nasync function loadPackageFiles(\n contentRoot: string,\n opts: { targetDir: string; matchedPattern?: string }\n): Promise<DetectionPackageFile[]> {\n const files: DetectionPackageFile[] = [];\n \n function isRelevantPath(relPath: string, targetDir: string, matchedPattern?: string): boolean {\n const normalized = relPath.replace(/\\\\/g, '/').replace(/^\\.\\/?/, '');\n const matchesMatchedPattern = matchedPattern ? 
matchPackagePath(normalized, matchedPattern) : true;\n if (!matchesMatchedPattern) {\n return false;\n }\n\n const isUniversal = matchesUniversalPattern(normalized, targetDir);\n let importMatch: { platformId: string; pattern: string } | null = null;\n\n if (!isUniversal) {\n // Platform-specific paths (declared by import flow \"from\" patterns in platforms.jsonc)\n const platforms = getPlatformDefinitions(targetDir);\n for (const [platformId, def] of Object.entries(platforms)) {\n const importFlows = def.import || [];\n for (const flow of importFlows) {\n const pattern = getPatternFromFlow(flow as any, 'from');\n if (pattern && matchPackagePath(normalized, pattern)) {\n importMatch = { platformId, pattern };\n break;\n }\n }\n if (importMatch) break;\n }\n }\n\n const relevant = isUniversal || importMatch !== null;\n\n return relevant;\n }\n\n // Build set of platform dot-directories from platform detection markers\n // so that .claude/, .cursor/, .agents/ etc. are walked for format conversion\n // while .git/, .github/, .vscode/ etc. 
are still skipped.\n const platformDotDirs = new Set<string>();\n const platformDefs = getPlatformDefinitions(opts.targetDir);\n for (const def of Object.values(platformDefs)) {\n for (const marker of (def as any).detection || []) {\n if (typeof marker === 'string' && marker.startsWith('.')) {\n platformDotDirs.add(marker.split('/')[0]);\n }\n }\n }\n\n async function walk(dir: string, baseDir: string, opts: { targetDir: string; matchedPattern?: string }): Promise<void> {\n const entries = await readdir(dir, { withFileTypes: true });\n\n for (const entry of entries) {\n const fullPath = join(dir, entry.name);\n\n // Skip hidden directories (except known platform directories) and node_modules\n if (entry.isDirectory()) {\n if (entry.name.startsWith('.') && !platformDotDirs.has(entry.name)) {\n continue;\n }\n if (entry.name === 'node_modules') {\n continue;\n }\n\n await walk(fullPath, baseDir, opts);\n } else {\n const relativePath = getRelativePathFromBase(fullPath, baseDir);\n if (!isRelevantPath(relativePath, opts.targetDir, opts.matchedPattern)) {\n continue;\n }\n // Load file content\n try {\n const content = await readFile(fullPath, 'utf-8');\n \n files.push({\n path: relativePath,\n content\n });\n } catch (error) {\n logger.warn(`Failed to read file: ${fullPath}`, { error });\n }\n }\n }\n }\n \n await walk(contentRoot, contentRoot, opts);\n\n return files;\n}\n", "import { minimatch } from 'minimatch';\n\n/**\n * Match a package-relative path against a glob pattern.\n *\n * Always enables `dot: true` so that paths containing dot-prefixed segments\n * (e.g. `.opencode/`, `.claude/`) are matched correctly. 
Without this,\n * `minimatch(\"root/.opencode/hooks.ts\", \"**\")` returns false \u2014 silently\n * dropping files that live under dot-prefixed directories.\n */\nexport function matchPackagePath(path: string, pattern: string): boolean {\n return minimatch(path, pattern, { dot: true });\n}\n", "/**\n * Temp Directory Helpers Module\n * \n * Utilities for managing temporary directories during package conversion.\n */\n\nimport { join, dirname } from 'path';\nimport { mkdtemp, rm, readdir } from 'fs/promises';\nimport { tmpdir } from 'os';\nimport { createHash } from 'crypto';\nimport { ensureDir, writeTextFile } from '../../../../utils/fs.js';\nimport { logger } from '../../../../utils/logger.js';\nimport type { PackageConversionContext } from '../../../../types/conversion-context.js';\nimport { contextToJSON } from '../../../conversion-context/index.js';\n\n/**\n * Scope descriptor for conversion cache directory isolation.\n * \n * - 'full': All files in the package are being converted. Cache goes into `_full/`.\n * - { type: 'subset', pattern: string }: A filtered subset of files. 
Cache goes into\n * `_subset.<hash>/` where hash is derived from the matched pattern.\n * \n * This ensures that full installs and subset installs never share the same cache\n * directory, preventing stale-file contamination across different install scopes.\n */\nexport type ConversionCacheScope =\n | { type: 'full' }\n | { type: 'subset'; pattern: string };\n\n/**\n * Compute a short hash of a pattern string for use as a directory name.\n * Uses first 8 hex characters of SHA-256.\n */\nfunction shortPatternHash(pattern: string): string {\n return createHash('sha256').update(pattern).digest('hex').slice(0, 8);\n}\n\n/**\n * Create a temporary directory for package conversion\n * \n * @param prefix - Prefix for temp directory name\n * @returns Absolute path to created temp directory\n */\nexport async function createTempPackageDirectory(prefix: string = 'opkg-converted-'): Promise<string> {\n const tempDir = await mkdtemp(join(tmpdir(), prefix));\n return tempDir;\n}\n\n/**\n * Create a scope-isolated conversion cache directory within a git cache location.\n * \n * The directory layout under `.opkg-converted/` is partitioned by install scope:\n * \n * ```\n * .opkg-converted/\n * _full/ # Conversion cache for full installs\n * _subset.<hash>/ # Conversion cache for a specific subset pattern\n * ```\n * \n * This ensures that:\n * - A full install never reads stale files from a prior subset install\n * - A subset install never reads files from a prior full install\n * - Different subset patterns get their own isolated caches\n * \n * @param gitCachePath - Path to git cache directory (e.g., ~/.openpackage/cache/git/<hash>/<sha>)\n * @param scope - The scope descriptor that determines the cache subdirectory\n * @returns Absolute path to the scope-specific conversion cache directory\n */\nexport async function createConversionCacheDirectory(\n gitCachePath: string,\n scope: ConversionCacheScope = { type: 'full' }\n): Promise<string> {\n const scopeDir = scope.type === 
'full'\n ? '_full'\n : `_subset.${shortPatternHash(scope.pattern)}`;\n const conversionDir = join(gitCachePath, '.opkg-converted', scopeDir);\n await ensureDir(conversionDir);\n return conversionDir;\n}\n\n/**\n * Clean up stale scope directories under `.opkg-converted/`.\n * \n * When performing a full install, prior subset caches are stale and should be removed\n * to reclaim disk space. The full cache supersedes all subset caches since it contains\n * the converted output for every file.\n * \n * @param gitCachePath - Path to git cache directory\n * @param keepScope - The scope being installed; directories matching this scope are preserved\n */\nexport async function cleanupStaleScopeDirs(\n gitCachePath: string,\n keepScope: ConversionCacheScope\n): Promise<void> {\n const convertedRoot = join(gitCachePath, '.opkg-converted');\n let entries: string[];\n try {\n entries = await readdir(convertedRoot);\n } catch {\n // Directory doesn't exist yet -- nothing to clean\n return;\n }\n\n const keepDir = keepScope.type === 'full'\n ? 
'_full'\n : `_subset.${shortPatternHash(keepScope.pattern)}`;\n\n for (const entry of entries) {\n if (entry === keepDir) continue;\n // Only clean scope directories (prefixed with _ to avoid cleaning unknown files)\n if (entry.startsWith('_full') || entry.startsWith('_subset.')) {\n const dirPath = join(convertedRoot, entry);\n try {\n await rm(dirPath, { recursive: true, force: true });\n logger.debug(`Cleaned stale conversion cache scope: ${entry}`);\n } catch (error) {\n logger.warn('Failed to clean stale conversion cache scope', { entry, error });\n }\n }\n }\n}\n\n/**\n * Write package files to temporary directory\n * \n * @param files - Array of files to write\n * @param tempDir - Temporary directory path\n */\nexport async function writeTempPackageFiles(\n files: Array<{ path: string; content: string }>,\n tempDir: string\n): Promise<void> {\n for (const file of files) {\n const filePath = join(tempDir, file.path);\n await ensureDir(dirname(filePath));\n await writeTextFile(filePath, file.content);\n }\n}\n\n/**\n * Write conversion context to temporary directory\n * \n * @param context - Conversion context to write\n * @param tempDir - Temporary directory path\n */\nexport async function writeConversionContext(\n context: PackageConversionContext,\n tempDir: string\n): Promise<void> {\n const contextPath = join(tempDir, '.opkg-conversion-context.json');\n const contextJson = contextToJSON(context);\n await writeTextFile(contextPath, contextJson);\n}\n\n/**\n * Cleanup temporary directory with error handling\n * \n * @param tempDir - Directory to cleanup\n */\nexport async function cleanupTempDirectory(tempDir: string | null): Promise<void> {\n if (!tempDir) {\n return;\n }\n \n try {\n await rm(tempDir, { recursive: true, force: true });\n } catch (error) {\n logger.warn('Failed to cleanup temp directory', {\n tempDir,\n error\n });\n }\n}\n", "import { join } from 'path';\nimport { InstallOptions } from '../../../types/index.js';\nimport type { 
InteractionPolicy } from '../../../core/interaction-policy.js';\nimport { PromptTier } from '../../../core/interaction-policy.js';\nimport type { ResolvedPackage } from '../../dependency-resolver/types.js';\nimport { checkExistingPackageInMarkdownFiles } from '../../openpackage.js';\nimport { parsePackageYml } from '../../../utils/package-yml.js';\nimport { exists } from '../../../utils/fs.js';\nimport { logger } from '../../../utils/logger.js';\nimport { getLocalPackageDir } from '../../../utils/paths.js';\nimport { FILE_PATTERNS } from '../../../constants/index.js';\nimport { getVersionInfoFromDependencyTree } from '../install-helpers.js';\nimport type { OutputPort } from '../../ports/output.js';\nimport type { PromptPort } from '../../ports/prompt.js';\nimport { resolvePrompt, resolveOutput } from '../../ports/resolve.js';\n\n/**\n * Get currently installed version from .openpackage/packages/<package>/openpackage.yml\n */\nasync function getInstalledPackageVersion(cwd: string, packageName: string): Promise<string | undefined> {\n try {\n const packageDir = getLocalPackageDir(cwd, packageName);\n const packageYmlPath = join(packageDir, FILE_PATTERNS.OPENPACKAGE_YML);\n if (await exists(packageYmlPath)) {\n const config = await parsePackageYml(packageYmlPath);\n return config.version;\n }\n } catch {\n // ignore parse errors; treat as unknown\n }\n return undefined;\n}\n\n/**\n * Check for conflicts with all packages in the dependency tree\n */\nexport async function checkAndHandleAllPackageConflicts(\n resolvedPackages: ResolvedPackage[],\n options: InstallOptions,\n policy?: InteractionPolicy,\n prompt?: PromptPort,\n output?: OutputPort\n): Promise<{ shouldProceed: boolean; skippedPackages: string[]; forceOverwritePackages: Set<string> }> {\n const cwd = process.cwd();\n const skippedPackages: string[] = [];\n const forceOverwritePackages = new Set<string>();\n const p = prompt ?? resolvePrompt();\n const out = output ?? 
resolveOutput();\n \n // Check each package in the dependency tree for conflicts\n for (const resolved of resolvedPackages) {\n const existingCheck = await checkExistingPackageInMarkdownFiles(cwd, resolved.name);\n \n if (existingCheck.found) {\n const versionInfo = await getVersionInfoFromDependencyTree(resolved.name, resolvedPackages);\n const existingVersion = existingCheck.version || await getInstalledPackageVersion(cwd, resolved.name);\n \n\n \n if (options.dryRun) {\n // In dry run mode, proceed; per-file logic will report decisions\n continue;\n }\n \n if (options.force) {\n // When --force is used, automatically overwrite all conflicts\n logger.info(`Force flag set - automatically overwriting package '${resolved.name}' v${existingVersion}`);\n forceOverwritePackages.add(resolved.name);\n continue;\n }\n \n // Prompt per package overwrite confirmation when existing detected\n if (policy && !policy.canPrompt(PromptTier.Confirmation)) {\n out.warn(`Skipping '${resolved.name}' (already exists). Use --force to overwrite.`);\n skippedPackages.push(resolved.name);\n } else {\n const versionSuffix = existingVersion ? ` (${existingVersion})` : '';\n const confirmed = await p.confirm(\n `Package '${resolved.name}' already exists${versionSuffix}. 
Overwrite all files?`\n );\n if (confirmed) {\n forceOverwritePackages.add(resolved.name);\n } else {\n skippedPackages.push(resolved.name);\n }\n }\n continue;\n }\n }\n \n return { shouldProceed: true, skippedPackages, forceOverwritePackages };\n}\n", "/**\n * Interaction Policy\n * \n * Single source of truth for whether the CLI can prompt the user.\n * Created once at command entry and threaded through all handlers.\n * \n * Prompt Tiers:\n * 0 - Required: Platform selection, marketplace plugin pick\n * 1 - Disambiguation: Ambiguous base directory selection\n * 2 - Confirmation: Overwrite confirmations\n * 3 - ConflictResolution: Version conflict selection\n * 4 - OptionalMenu: --interactive resource selection menus\n */\n\nexport enum PromptTier {\n Required = 0,\n Disambiguation = 1,\n Confirmation = 2,\n ConflictResolution = 3,\n OptionalMenu = 4,\n}\n\nexport type InteractionMode = 'never' | 'auto' | 'always';\n\nexport interface InteractionPolicy {\n readonly mode: InteractionMode;\n readonly isTTY: boolean;\n canPrompt(tier: PromptTier): boolean;\n}\n\n/**\n * Create an interaction policy from command options.\n * \n * Mode resolution:\n * --interactive + TTY \u2192 'always' (all tiers allowed)\n * --interactive + !TTY \u2192 throws (user explicitly asked for interactive)\n * CI=true or !TTY \u2192 'never' (no prompts, errors or safe defaults)\n * default TTY \u2192 'auto' (ambient prompts for tiers 0-3, never tier 4)\n */\nexport function createInteractionPolicy(options: {\n interactive?: boolean;\n force?: boolean;\n}): InteractionPolicy {\n const isTTY = process.stdin.isTTY === true;\n\n let mode: InteractionMode;\n if (options.interactive) {\n if (!isTTY) {\n throw new Error(\n '--interactive requires an interactive terminal (TTY). ' +\n 'Use specific filters (--agents, --skills, etc.) 
for non-interactive installs.'\n );\n }\n mode = 'always';\n } else if (!isTTY || process.env.CI === 'true') {\n mode = 'never';\n } else {\n mode = 'auto';\n }\n\n return {\n mode,\n isTTY,\n canPrompt(tier: PromptTier): boolean {\n if (mode === 'never') return false;\n if (mode === 'always') return true;\n // 'auto': allow ambient prompts (tiers 0-3), never tier 4 (optional menus)\n return tier < PromptTier.OptionalMenu;\n }\n };\n}\n", "import { join } from 'path';\nimport { PackageYml, PackageDependency } from '../types/index.js';\nimport { parsePackageYml } from '../utils/package-yml.js';\nimport { exists, isDirectory, listDirectories } from '../utils/fs.js';\nimport { logger } from '../utils/logger.js';\nimport { FILE_PATTERNS } from '../constants/index.js';\nimport { DEFAULT_INSTALL_ROOT } from '../constants/workspace.js';\nimport { getLocalPackageYmlPath, getLocalPackagesDir } from '../utils/paths.js';\nimport { findFilesByExtension, findDirectoriesContainingFile } from '../utils/file-processing.js';\nimport { getDetectedPlatforms, getPlatformDefinition, type Platform } from './platforms.js';\nimport { arePackageNamesEquivalent } from '../utils/package-name.js';\nimport { extractToPatternString } from './flows/to-pattern-extractor.js';\nimport { extractDefaultPattern } from './flows/switch-resolver.js';\n\n/**\n * Package metadata from openpackage directory\n */\nexport interface OpenPackagePackage {\n name: string;\n version?: string;\n description?: string;\n dependencies?: PackageDependency[];\n 'dev-dependencies'?: PackageDependency[];\n path: string;\n}\n\n/**\n * Find package config file in a directory.\n * Prefers v2 layout: <dir>/openpackage.yml, then <dir>/.openpackage/openpackage.yml (workspace-style).\n */\nasync function findPackageConfigFile(directoryPath: string): Promise<string | null> {\n const rootManifest = join(directoryPath, FILE_PATTERNS.OPENPACKAGE_YML);\n const workspaceManifest = getLocalPackageYmlPath(directoryPath);\n \n if (await 
exists(rootManifest)) {\n return rootManifest;\n } else if (await exists(workspaceManifest)) {\n return workspaceManifest;\n }\n \n return null;\n}\n\n/**\n * Get the version of an installed package by package name\n */\nexport async function getInstalledPackageVersion(packageName: string, targetDir: string): Promise<string | null> {\n const openpackagePath = join(targetDir, DEFAULT_INSTALL_ROOT);\n const packageOpenPackagePath = join(openpackagePath, packageName);\n \n if (!(await exists(packageOpenPackagePath))) {\n return null;\n }\n \n const configPath = await findPackageConfigFile(packageOpenPackagePath);\n if (!configPath) {\n return null;\n }\n \n try {\n const config = await parsePackageYml(configPath);\n return config.version ?? null;\n } catch (error) {\n logger.warn(`Failed to parse package config for ${packageName}: ${error}`);\n return null;\n }\n}\n\n/**\n * Find package directory in ai by matching package name\n */\nexport async function findPackageDirectory(openpackagePath: string, packageName: string): Promise<string | null> {\n if (!(await exists(openpackagePath)) || !(await isDirectory(openpackagePath))) {\n return null;\n }\n\n try {\n const subdirectories = await listDirectories(openpackagePath);\n \n for (const subdir of subdirectories) {\n const subdirPath = join(openpackagePath, subdir);\n const configPath = await findPackageConfigFile(subdirPath);\n \n if (configPath) {\n try {\n const packageConfig = await parsePackageYml(configPath);\n if (arePackageNamesEquivalent(packageConfig.name, packageName)) {\n return subdirPath;\n }\n } catch (error) {\n logger.warn(`Failed to parse package file ${configPath}: ${error}`);\n }\n }\n }\n \n return null;\n } catch (error) {\n logger.error(`Failed to search ai directory: ${error}`);\n return null;\n }\n}\n\n/**\n * Scan openpackage directory for all available packages\n */\nexport async function scanOpenPackagePackages(openpackagePath: string): Promise<Map<string, OpenPackagePackage>> {\n const 
packages = new Map<string, OpenPackagePackage>();\n\n if (!(await exists(openpackagePath)) || !(await isDirectory(openpackagePath))) {\n logger.debug('Install root directory not found or not a directory', { openpackagePath });\n return packages;\n }\n\n try {\n // Find all openpackage.yml files recursively under the packages directory\n const packagesDir = getLocalPackagesDir(openpackagePath);\n if (!(await exists(packagesDir))) {\n return packages;\n }\n\n const packageDirs = await findDirectoriesContainingFile(\n packagesDir,\n FILE_PATTERNS.OPENPACKAGE_YML,\n async (filePath) => {\n try {\n return await parsePackageYml(filePath);\n } catch (error) {\n logger.warn(`Failed to parse package file ${filePath}: ${error}`);\n return null;\n }\n }\n );\n\n for (const { dirPath, parsedContent } of packageDirs) {\n if (parsedContent) {\n const packageConfig = parsedContent;\n packages.set(packageConfig.name, {\n name: packageConfig.name,\n version: packageConfig.version,\n description: packageConfig.description,\n dependencies: packageConfig.dependencies || [],\n 'dev-dependencies': packageConfig['dev-dependencies'] || [],\n path: dirPath\n });\n }\n }\n } catch (error) {\n logger.error(`Failed to scan ai directory: ${error}`);\n }\n\n return packages;\n}\n\n/**\n * Gather version constraints from the main and cached package openpackage.yml files\n */\nexport async function gatherGlobalVersionConstraints(cwd: string, includeResolutions: boolean = true): Promise<Map<string, string[]>> {\n const constraints = new Map<string, Set<string>>();\n\n const addConstraint = (name?: string, range?: string) => {\n if (!name || !range) {\n return;\n }\n\n const trimmedName = name.trim();\n const trimmedRange = range.trim();\n\n if (!trimmedName || !trimmedRange) {\n return;\n }\n\n if (!constraints.has(trimmedName)) {\n constraints.set(trimmedName, new Set());\n }\n\n constraints.get(trimmedName)!.add(trimmedRange);\n };\n\n const collectFromConfig = (config: PackageYml | null | 
undefined) => {\n if (!config) {\n return;\n }\n\n config.dependencies?.forEach(dep => addConstraint(dep.name, dep.version));\n config['dev-dependencies']?.forEach(dep => addConstraint(dep.name, dep.version));\n };\n\n // Collect from main .openpackage/openpackage.yml if present\n const mainPackagePath = getLocalPackageYmlPath(cwd);\n if (await exists(mainPackagePath)) {\n try {\n const mainConfig = await parsePackageYml(mainPackagePath);\n collectFromConfig(mainConfig);\n } catch (error) {\n logger.debug(`Failed to parse main openpackage.yml for constraints: ${error}`);\n }\n }\n\n // Collect from each package under .openpackage/packages\n const packagesDir = getLocalPackagesDir(cwd);\n if (await exists(packagesDir) && await isDirectory(packagesDir)) {\n try {\n const packageDirs = await findDirectoriesContainingFile(\n packagesDir,\n FILE_PATTERNS.OPENPACKAGE_YML,\n async (filePath) => {\n try {\n return await parsePackageYml(filePath);\n } catch (error) {\n logger.debug(`Failed to parse openpackage.yml at ${filePath}: ${error}`);\n return null;\n }\n }\n );\n\n for (const { parsedContent } of packageDirs) {\n collectFromConfig(parsedContent);\n }\n } catch (error) {\n logger.debug(`Failed to enumerate packages directory for constraints: ${error}`);\n }\n }\n\n const result = new Map<string, string[]>();\n for (const [name, ranges] of constraints) {\n result.set(name, Array.from(ranges));\n }\n\n return result;\n}\n\n/**\n * Gather version constraints only from the main .openpackage/openpackage.yml\n * Used to treat root-declared versions as authoritative overrides\n */\nexport async function gatherRootVersionConstraints(cwd: string): Promise<Map<string, string[]>> {\n const result = new Map<string, string[]>();\n\n const addConstraint = (name?: string, range?: string) => {\n if (!name || !range) return;\n const trimmedName = name.trim();\n const trimmedRange = range.trim();\n if (!trimmedName || !trimmedRange) return;\n if (!result.has(trimmedName)) 
result.set(trimmedName, []);\n const arr = result.get(trimmedName)!;\n if (!arr.includes(trimmedRange)) arr.push(trimmedRange);\n };\n\n const mainPackagePath = getLocalPackageYmlPath(cwd);\n if (await exists(mainPackagePath)) {\n try {\n const mainConfig = await parsePackageYml(mainPackagePath);\n mainConfig.dependencies?.forEach(dep => addConstraint(dep.name, dep.version));\n mainConfig['dev-dependencies']?.forEach(dep => addConstraint(dep.name, dep.version));\n } catch (error) {\n logger.debug(`Failed to parse main openpackage.yml for root constraints: ${error}`);\n }\n }\n\n return result;\n}\n\n/**\n * Get package configuration\n */\nexport async function getOpenPackagePackageConfig(openpackagePath: string, packageName: string): Promise<PackageYml | null> {\n const packagePath = await findPackageDirectory(openpackagePath, packageName);\n if (!packagePath) {\n return null;\n }\n \n const configPath = await findPackageConfigFile(packagePath);\n if (!configPath) {\n return null;\n }\n \n try {\n return await parsePackageYml(configPath);\n } catch (error) {\n logger.warn(`Failed to parse package config for ${packageName}: ${error}`);\n return null;\n }\n}\n\n/**\n * Check for existing installed package by searching markdown files in ai, .claude, and .cursor directories\n */\nexport async function checkExistingPackageInMarkdownFiles(\n cwd: string, \n packageName: string\n): Promise<{ found: boolean; version?: string; location?: string }> {\n // Build search targets: ai directory + all detected platform subdirectories\n const targets: Array<{ dir: string; exts?: string[]; label: string }> = [];\n\n // Always include workspace install root\n targets.push({\n dir: join(cwd, DEFAULT_INSTALL_ROOT),\n exts: [FILE_PATTERNS.MD_FILES],\n label: DEFAULT_INSTALL_ROOT\n });\n\n // Add detected platforms' directories from flows\n try {\n const platforms = await getDetectedPlatforms(cwd);\n for (const platform of platforms) {\n const def = getPlatformDefinition(platform as 
Platform);\n \n // Extract directories from export flows (package \u2192 workspace)\n if (def.export && def.export.length > 0) {\n const platformDirs = new Set<string>();\n \n for (const flow of def.export) {\n const toPattern = extractToPatternString(flow.to, extractDefaultPattern);\n if (toPattern) {\n // Extract directory from pattern\n const parts = toPattern.split('/');\n if (parts.length > 1) {\n const fullPath = parts.slice(0, -1).join('/');\n platformDirs.add(fullPath);\n }\n }\n }\n \n // Add each unique directory as a target\n for (const dirPath of platformDirs) {\n const fullDirPath = join(cwd, dirPath);\n targets.push({ \n dir: fullDirPath, \n exts: undefined, // Allow all extensions (flows handle this)\n label: def.id \n });\n }\n }\n }\n } catch (error) {\n logger.debug(`Failed to build platform search targets: ${error}`);\n }\n\n logger.debug(`Checking for existing package '${packageName}' across ${targets.length} locations`);\n\n // Search each target directory for files with supported extensions\n for (const target of targets) {\n const extensions = target.exts;\n if (extensions && extensions.length === 0) {\n continue;\n }\n\n try {\n const files = await findFilesByExtension(target.dir, extensions ?? 
[]);\n for (const file of files) {\n // Frontmatter support removed - cannot determine package ownership\n }\n } catch (dirErr) {\n logger.debug(`Failed to search directory ${target.dir}: ${dirErr}`);\n }\n }\n\n return { found: false };\n}\n", "import * as semver from 'semver';\nimport { PackageYml } from '../../types/index.js';\nimport type { ResolvedPackage } from '../dependency-resolver/types.js';\nimport { arePackageNamesEquivalent } from '../../utils/package-name.js';\nimport { getLocalPackageYmlPath } from '../../utils/paths.js';\nimport { parsePackageYml } from '../../utils/package-yml.js';\nimport { exists } from '../../utils/fs.js';\n\n/**\n * Extract packages from openpackage.yml configuration\n */\nexport function extractPackagesFromConfig(config: PackageYml): Array<{ name: string; version?: string; path?: string; git?: string; url?: string; ref?: string; subdirectory?: string; isDev: boolean }> {\n const packages: Array<{ name: string; version?: string; path?: string; git?: string; url?: string; ref?: string; subdirectory?: string; isDev: boolean }> = [];\n \n const processSection = (section: 'dependencies' | 'dev-dependencies', isDev: boolean) => {\n const deps = config[section];\n if (deps) {\n for (const pkg of deps) {\n packages.push({\n name: pkg.name,\n version: pkg.version,\n path: pkg.path,\n git: (pkg as any).git,\n url: (pkg as any).url,\n ref: (pkg as any).ref,\n subdirectory: (pkg as any).subdirectory,\n isDev\n });\n }\n }\n };\n\n processSection('dependencies', false);\n processSection('dev-dependencies', true);\n \n return packages;\n}\n\n\n\n/**\n * Get the highest version and required version of a package from the dependency tree\n */\nexport async function getVersionInfoFromDependencyTree(\n packageName: string,\n resolvedPackages: ResolvedPackage[]\n): Promise<{ highestVersion: string; requiredVersion?: string }> {\n let highestVersion = '0.0.0';\n let highestRequiredVersion: string | undefined;\n \n // Get the requiredVersions map 
from the first resolved package\n const requiredVersions = (resolvedPackages[0] as any)?.requiredVersions as Map<string, string[]> | undefined;\n \n for (const resolved of resolvedPackages) {\n if (arePackageNamesEquivalent(resolved.name, packageName)) {\n if (semver.gt(resolved.version, highestVersion)) {\n highestVersion = resolved.version;\n }\n }\n }\n \n // Get the highest required version from all specified versions for this package\n if (requiredVersions && requiredVersions.has(packageName)) {\n const versions = requiredVersions.get(packageName)!;\n for (const version of versions) {\n if (!highestRequiredVersion || semver.gt(version, highestRequiredVersion)) {\n highestRequiredVersion = version;\n }\n }\n }\n \n return { highestVersion, requiredVersion: highestRequiredVersion };\n}\n\n/**\n * Check if a package name refers to an existing path/git-based dependency in openpackage.yml\n * Returns the dependency source if found, null otherwise\n */\nexport async function findExistingPathOrGitSource(\n cwd: string,\n packageName: string\n): Promise<\n | { type: 'path'; path: string }\n | { type: 'git'; url: string; ref?: string; subdir?: string }\n | null\n> {\n const packageYmlPath = getLocalPackageYmlPath(cwd);\n if (!(await exists(packageYmlPath))) {\n return null;\n }\n\n const config = await parsePackageYml(packageYmlPath);\n const allDeps = [...(config.dependencies || []), ...(config['dev-dependencies'] || [])];\n \n const dep = allDeps.find(d => arePackageNamesEquivalent(d.name, packageName));\n if (!dep) {\n return null;\n }\n\n // Handle both new (url) and legacy (git) fields\n if (dep.url || dep.git) {\n const gitUrlRaw = dep.url || dep.git!;\n \n // Parse url field to extract ref if embedded\n const [gitUrl, embeddedRef] = gitUrlRaw.includes('#') \n ? 
gitUrlRaw.split('#', 2)\n : [gitUrlRaw, undefined];\n \n // Use embedded ref if present, otherwise fall back to separate ref field\n const ref = embeddedRef || dep.ref;\n \n return { type: 'git', url: gitUrl, ref, subdir: dep.base ?? dep.path };\n }\n\n if (dep.base || dep.path) {\n return { type: 'path', path: (dep.base ?? dep.path)! };\n }\n\n return null;\n}\n", "import type { InstallationContext } from '../context.js';\nimport { checkAndHandleAllPackageConflicts } from '../../operations/conflict-handler.js';\nimport { logger } from '../../../../utils/logger.js';\n\n/**\n * Process conflicts phase\n * @returns true if should proceed, false if cancelled\n */\nexport async function processConflictsPhase(ctx: InstallationContext): Promise<boolean> {\n const conflictResult = await checkAndHandleAllPackageConflicts(\n ctx.resolvedPackages as any,\n ctx.options,\n ctx.execution?.interactionPolicy,\n ctx.execution?.prompt,\n ctx.execution?.output\n );\n \n if (!conflictResult.shouldProceed) {\n return false;\n }\n \n // Update resolved packages based on conflict resolution\n ctx.resolvedPackages = ctx.resolvedPackages.filter(pkg => !conflictResult.skippedPackages.includes(pkg.name));\n \n // Store conflict result in context for use in the execute phase\n ctx.conflictResult = conflictResult;\n \n return true;\n}\n", "/**\n * Platform Root Files Utilities\n * Shared utilities for collecting and working with platform root file names\n */\n\nimport { FILE_PATTERNS } from '../../constants/index.js';\nimport type { Platform } from '../../types/platform.js';\nimport { getPlatformDefinition } from '../platforms.js';\n\n/**\n * Get all platform root file names (including universal AGENTS.md) for the given platforms.\n * @param platforms - Array of platforms to collect root files from\n * @param targetDir - Optional target directory for platform config overrides\n * @returns Set of root file names\n */\nexport function getPlatformRootFileNames(platforms: Platform[], targetDir?: 
string): Set<string> {\n const names = new Set<string>([FILE_PATTERNS.AGENTS_MD]);\n for (const platform of platforms) {\n const def = getPlatformDefinition(platform, targetDir);\n if (def.rootFile) {\n names.add(def.rootFile);\n }\n }\n return names;\n}\n", "import { packageManager } from '../../package.js';\nimport { FILE_PATTERNS, PACKAGE_PATHS } from '../../../constants/index.js';\nimport type { PackageFile } from '../../../types/index.js';\nimport type { Platform } from '../../platforms.js';\nimport { isManifestPath, normalizePackagePath } from '../../../utils/manifest-paths.js';\nimport { getPlatformRootFileNames } from '../../platform/platform-root-files.js';\nimport { join } from 'path';\nimport { exists } from '../../../utils/fs.js';\nimport { hasPluginContent } from '../plugin-detector.js';\nimport { matchPackagePath } from '../../../utils/match-path.js';\n\nexport interface CategorizedInstallFiles {\n pathBasedFiles: PackageFile[];\n rootFiles: Map<string, string>;\n}\n\nexport async function discoverAndCategorizeFiles(\n packageName: string,\n version: string,\n platforms: Platform[],\n contentRoot?: string,\n matchedPattern?: string // Phase 4: Pattern for filtering\n): Promise<CategorizedInstallFiles> {\n // Root file discovery only works for OpenPackage dirs or Claude plugins.\n // Also treat directories with plugin content (commands/agents/skills/hooks or .mcp.json/.lsp.json)\n // as loadable so marketplace-defined plugins without plugin.json can install.\n if (contentRoot) {\n const hasOpenPackageYml = await exists(join(contentRoot, 'openpackage.yml'));\n const hasClaudePluginJson = await exists(join(contentRoot, '.claude-plugin', 'plugin.json'));\n const hasClaudeMarketplaceJson = await exists(join(contentRoot, '.claude-plugin', 'marketplace.json'));\n const hasPluginContentDirs = await hasPluginContent(contentRoot);\n const isLoadableRoot =\n hasOpenPackageYml || hasClaudePluginJson || hasClaudeMarketplaceJson || hasPluginContentDirs;\n\n if 
(!isLoadableRoot) {\n return { pathBasedFiles: [], rootFiles: new Map() };\n }\n }\n\n // Load once\n const pkg = await packageManager.loadPackage(packageName, version, {\n packageRootDir: contentRoot\n });\n\n // Phase 4: Build include filter that considers matchedPattern\n const shouldInclude = (path: string): boolean => {\n // Check matched pattern (from base detection or resource scoping)\n if (matchedPattern && !matchPackagePath(path, matchedPattern)) {\n return false;\n }\n \n return true;\n };\n\n // Precompute platform root filenames\n const platformRootNames = getPlatformRootFileNames(platforms);\n\n // Single pass classification\n const pathBasedFiles: PackageFile[] = [];\n const rootFiles = new Map<string, string>();\n for (const file of pkg.files) {\n const p = file.path;\n const normalized = normalizePackagePath(p);\n // Never install registry package metadata files\n if (isManifestPath(p) || normalized === PACKAGE_PATHS.INDEX_RELATIVE) continue;\n if (!shouldInclude(p)) continue;\n\n pathBasedFiles.push(file);\n\n if (normalized === FILE_PATTERNS.AGENTS_MD || platformRootNames.has(normalized)) {\n rootFiles.set(normalized, file.content);\n }\n }\n\n return { pathBasedFiles, rootFiles };\n}\n\n\n", "/**\n * Unified Root File Operations\n * Handles root file installation and synchronization for both install and apply commands\n */\n\nimport { join } from 'path';\nimport { exists, readTextFile, writeTextFile } from '../../../utils/fs.js';\nimport { mergePackageContentIntoRootFile } from '../../../utils/root-file-merger.js';\nimport { extractPackageContentFromRootFile } from '../../../utils/root-file-extractor.js';\nimport { getPlatformDefinition, getAllPlatforms, type Platform } from '../../platforms.js';\nimport { FILE_PATTERNS } from '../../../constants/index.js';\nimport { logger } from '../../../utils/logger.js';\nimport { getPathLeaf } from '../../../utils/path-normalization.js';\nimport type { PackageFile } from '../../../types/index.js';\n\n/**\n 
* Input for root file operations - supports both formats\n */\nexport type RootFileInput = \n | Map<string, string> // From install flow\n | PackageFile[] // From apply flow\n | Record<string, string>; // Alternative map format\n\n/**\n * Result of root file operations\n */\nexport interface RootFileOperationResult {\n /** Files that were newly created */\n created: string[];\n \n /** Files that were updated (already existed) */\n updated: string[];\n \n /** Files that were skipped (no changes needed) */\n skipped: string[];\n \n /** Legacy alias for created (for backward compatibility) */\n installed?: string[];\n}\n\n/**\n * Universal root file installer/syncer\n * \n * Handles root file installation and synchronization for both install and apply commands.\n * Supports multiple input formats for maximum flexibility.\n * \n * @param cwd - Current working directory\n * @param packageName - Name of the package\n * @param rootFiles - Root files to install (Map, PackageFile[], or Record)\n * @param platforms - Target platforms\n * @returns Operation result with created/updated/skipped files\n */\nexport async function installOrSyncRootFiles(\n cwd: string,\n packageName: string,\n rootFiles: RootFileInput,\n platforms: Platform[]\n): Promise<RootFileOperationResult> {\n const result: RootFileOperationResult = {\n created: [],\n updated: [],\n skipped: []\n };\n \n // Normalize input to a common format\n const rootFilesMap = normalizeRootFileInput(rootFiles);\n \n if (rootFilesMap.size === 0) {\n return result;\n }\n \n // Always install/sync universal AGENTS.md regardless of platform detection\n await installUniversalAgentsFile(cwd, packageName, rootFilesMap, result);\n \n // Install platform-specific root files\n if (platforms.length > 0) {\n await installPlatformRootFiles(cwd, packageName, rootFilesMap, platforms, result);\n }\n \n // Deduplicate results\n result.created = Array.from(new Set(result.created));\n result.updated = Array.from(new Set(result.updated));\n 
result.skipped = Array.from(new Set(result.skipped));\n \n // Add legacy alias for backward compatibility\n result.installed = result.created;\n \n return result;\n}\n\n/**\n * Normalize different input formats to a common Map structure\n */\nfunction normalizeRootFileInput(input: RootFileInput): Map<string, string> {\n // Already a Map\n if (input instanceof Map) {\n return input;\n }\n \n // PackageFile array\n if (Array.isArray(input)) {\n const map = new Map<string, string>();\n for (const file of input) {\n const fileName = getPathLeaf(file.path);\n if (fileName && isRootFile(fileName)) {\n map.set(fileName, file.content);\n }\n }\n return map;\n }\n \n // Plain object/Record\n return new Map(Object.entries(input));\n}\n\n/**\n * Check if a file name is a root file\n */\nfunction isRootFile(fileName: string): boolean {\n const rootFileNames = new Set<string>([FILE_PATTERNS.AGENTS_MD]);\n \n // Add all platform root files\n for (const platform of getAllPlatforms()) {\n const def = getPlatformDefinition(platform);\n if (def.rootFile) {\n rootFileNames.add(def.rootFile);\n }\n }\n \n return rootFileNames.has(fileName);\n}\n\n/**\n * Install universal AGENTS.md file\n */\nasync function installUniversalAgentsFile(\n cwd: string,\n packageName: string,\n rootFilesMap: Map<string, string>,\n result: RootFileOperationResult\n): Promise<void> {\n const agentsContent = rootFilesMap.get(FILE_PATTERNS.AGENTS_MD);\n \n if (!agentsContent || !agentsContent.trim()) {\n return;\n }\n \n try {\n const targetPath = join(cwd, FILE_PATTERNS.AGENTS_MD);\n const wasUpdated = await installSingleRootFile(\n targetPath,\n packageName,\n agentsContent.trim()\n );\n \n if (wasUpdated) {\n result.updated.push(FILE_PATTERNS.AGENTS_MD);\n } else {\n result.created.push(FILE_PATTERNS.AGENTS_MD);\n }\n } catch (error) {\n logger.error(`Failed to install universal root file ${FILE_PATTERNS.AGENTS_MD}: ${error}`);\n result.skipped.push(FILE_PATTERNS.AGENTS_MD);\n }\n}\n\n/**\n * Install 
platform-specific root files\n */\nasync function installPlatformRootFiles(\n cwd: string,\n packageName: string,\n rootFilesMap: Map<string, string>,\n platforms: Platform[],\n result: RootFileOperationResult\n): Promise<void> {\n for (const platform of platforms) {\n const platformDef = getPlatformDefinition(platform);\n \n if (!platformDef.rootFile) {\n continue; // Platform doesn't use root files\n }\n \n // Skip if already handled by universal AGENTS.md\n if (platformDef.rootFile === FILE_PATTERNS.AGENTS_MD) {\n continue;\n }\n \n // Prefer platform-specific file, fallback to AGENTS.md\n let content = rootFilesMap.get(platformDef.rootFile);\n let sourceFileName = platformDef.rootFile;\n \n if (!content && rootFilesMap.has(FILE_PATTERNS.AGENTS_MD)) {\n content = rootFilesMap.get(FILE_PATTERNS.AGENTS_MD)!;\n sourceFileName = FILE_PATTERNS.AGENTS_MD;\n }\n \n if (!content || !content.trim()) {\n continue;\n }\n \n try {\n const targetPath = join(cwd, platformDef.rootFile);\n const wasUpdated = await installSingleRootFile(\n targetPath,\n packageName,\n content.trim()\n );\n \n if (wasUpdated) {\n result.updated.push(platformDef.rootFile);\n } else {\n result.created.push(platformDef.rootFile);\n }\n } catch (error) {\n logger.error(`Failed to install root file ${platformDef.rootFile}: ${error}`);\n result.skipped.push(platformDef.rootFile);\n }\n }\n}\n\n/**\n * Install or update a single root file\n * @returns true if file was updated (existed before), false if newly created\n */\nasync function installSingleRootFile(\n targetPath: string,\n packageName: string,\n sectionBody: string\n): Promise<boolean> {\n // Read existing content if file exists\n let existingContent = '';\n let fileExists = false;\n \n if (await exists(targetPath)) {\n existingContent = await readTextFile(targetPath);\n fileExists = true;\n }\n \n // Check if section content is unchanged (optimization)\n if (fileExists) {\n const existingSectionContent = extractPackageContentFromRootFile(\n 
existingContent,\n packageName\n )?.trim();\n \n if (existingSectionContent === sectionBody) {\n return true; // Still counts as \"updated\" (touched but unchanged)\n }\n }\n \n // Merge package content into the file\n const mergedContent = mergePackageContentIntoRootFile(\n existingContent,\n packageName,\n sectionBody\n );\n \n // Write the merged content\n await writeTextFile(targetPath, mergedContent);\n \n return fileExists;\n}\n", "\n/**\n * Utility for extracting and handling package-specific content markers\n * for root files (AGENTS.md, CLAUDE.md, etc.).\n *\n * Marker format:\n * <!-- package: <package-name> --> ... <!-- -->\n */\n\n/**\n * Escape a string for safe insertion into a RegExp pattern.\n */\nexport function escapeRegExp(input: string): string {\n return input.replace(/[.*+?^${}()|[\\]\\\\]/g, '\\\\$&');\n}\n\n/** Build the literal open marker string for a package */\nexport function buildOpenMarker(packageName: string): string {\n return `<!-- package: ${packageName} -->`;\n}\n\n/** Constant close marker string */\nexport const CLOSE_MARKER = '<!-- -->';\n\n/** Build a case-insensitive regex to match the open marker for a package */\nexport function buildOpenMarkerRegex(packageName: string): RegExp {\n const namePattern = escapeRegExp(packageName);\n return new RegExp(`<!--\\\\s*package:\\\\s*${namePattern}\\\\s*-->`, 'i');\n}\n\n/** Case-insensitive regex to match any package open marker */\nexport const OPEN_MARKER_ANY_REGEX = /<!--\\s*package:\\s*[^\\s>]+?\\s*-->/i;\n\n/** Case-insensitive regex to match the close marker */\nexport const CLOSE_MARKER_REGEX = /<!--\\s*-->/i;\n\n/**\n * Global, non-greedy match between open/close markers, capturing package name and body\n * Example: <!-- package: name --> ... 
<!-- -->\n */\nexport const FORMULA_SECTION_GLOBAL_REGEX = /<!--\\s*package:\\s*([^\\s]+)\\s*-->([\\s\\S]*?)<!--\\s*-->/gi;\n\n/**\n * Detect whether content includes a marker-wrapped section.\n * If packageName is provided, ensures the open marker matches it.\n */\nexport function isMarkerWrappedContent(content: string, packageName?: string): boolean {\n if (!content) return false;\n const hasClose = CLOSE_MARKER_REGEX.test(content);\n if (!hasClose) return false;\n if (packageName) {\n return buildOpenMarkerRegex(packageName).test(content);\n }\n return OPEN_MARKER_ANY_REGEX.test(content);\n}\n\n/**\n * Extract content for a specific package from AGENTS.md content.\n *\n * - Opening marker: <!-- package: <packageName> -->\n * - Closing marker: <!-- --> (optionally allowing internal whitespace)\n *\n * Returns null if no matching section is found.\n */\nexport function extractPackageContentFromRootFile(content: string, packageName: string): string | null {\n if (!content || !packageName) return null;\n\n const openRe = buildOpenMarkerRegex(packageName);\n const closeRe = CLOSE_MARKER_REGEX;\n\n const openMatch = openRe.exec(content);\n if (!openMatch) return null;\n\n const startIndex = openMatch.index + openMatch[0].length;\n const rest = content.slice(startIndex);\n const closeMatch = closeRe.exec(rest);\n if (!closeMatch) return null;\n\n const endIndexInRest = closeMatch.index; // relative to rest\n const extracted = rest.slice(0, endIndexInRest);\n return extracted.trim();\n}\n\n/**\n * Extract the section body for a specific package from marker-wrapped content.\n * Supports markers in the format:\n * <!-- package: <name> --> ... 
<!-- -->\n * Returns null if no section is found.\n */\nexport function extractPackageSection(\n content: string,\n packageName: string\n): { sectionBody: string } | null {\n if (!content || !packageName) return null;\n\n const openRe = buildOpenMarkerRegex(packageName);\n const closeRe = CLOSE_MARKER_REGEX;\n\n const openMatch = openRe.exec(content);\n if (!openMatch) return null;\n\n const startIndex = openMatch.index + openMatch[0].length;\n const rest = content.slice(startIndex);\n const closeMatch = closeRe.exec(rest);\n if (!closeMatch) return null;\n\n const sectionBody = rest.slice(0, closeMatch.index).trim();\n\n return { sectionBody };\n}\n\n/**\n * Extract all package sections from a root file content.\n * Returns a map of packageName \u2192 content.\n */\nexport function extractAllPackageSections(content: string): Map<string, string> {\n const sections = new Map<string, string>();\n \n if (!content) {\n return sections;\n }\n \n // Clone the global regex to avoid cross-call lastIndex interference\n const packagePattern = new RegExp(FORMULA_SECTION_GLOBAL_REGEX);\n \n let match: RegExpExecArray | null;\n while ((match = packagePattern.exec(content)) !== null) {\n const packageName = match[1].trim();\n const sectionContent = match[2].trim();\n sections.set(packageName, sectionContent);\n }\n \n return sections;\n}\n", "/**\n * Root File Content Merger Utility\n * Handles marker-based content merging for root files (AGENTS.md, CLAUDE.md, etc.)\n */\n\nimport { buildOpenMarker, buildOpenMarkerRegex, CLOSE_MARKER, CLOSE_MARKER_REGEX } from './root-file-extractor.js';\n\n/**\n * Merge package-specific content into a root file while preserving all other content.\n * Finds the package section between markers and replaces it, or appends if not found.\n * \n * @param existingContent - The current content of the root file (or empty string)\n * @param packageName - Name of the package to merge\n * @param newContent - Section body to insert between the markers 
(without markers)\n * @returns Updated root file content with package section merged\n */\nexport function mergePackageContentIntoRootFile(\n existingContent: string,\n packageName: string,\n newContent: string\n): string {\n if (!packageName) {\n throw new Error('Package name is required for merging');\n }\n\n const openMarker = buildOpenMarker(packageName);\n const closeMarker = CLOSE_MARKER;\n \n // Create regex to find existing package section\n const openRe = buildOpenMarkerRegex(packageName);\n const closeRe = CLOSE_MARKER_REGEX;\n\n const openMatch = openRe.exec(existingContent);\n \n if (!openMatch) {\n // Package section doesn't exist - append it\n const separator = existingContent.trim() ? '\\n\\n' : '';\n return existingContent.trim() + separator + openMarker + '\\n' + newContent.trim() + '\\n' + closeMarker + '\\n';\n }\n\n // Package section exists - check if it already has the correct content\n const beforeSection = existingContent.substring(0, openMatch.index);\n const afterMarkerIndex = openMatch.index + openMatch[0].length;\n const restContent = existingContent.substring(afterMarkerIndex);\n\n const closeMatch = closeRe.exec(restContent);\n\n if (!closeMatch) {\n // Malformed - missing closing marker, append new section at end\n const separator = existingContent.trim() ? 
'\\n\\n' : '';\n return existingContent.trim() + separator + openMarker + '\\n' + newContent.trim() + '\\n' + closeMarker + '\\n';\n }\n\n // Extract the existing section content\n const existingSectionBody = restContent.substring(0, closeMatch.index).trim();\n const afterCloseMarkerIndex = closeMatch.index + closeMatch[0].length;\n const afterSection = restContent.substring(afterCloseMarkerIndex);\n\n // Check if the existing section already has the correct content and marker\n // Use component-wise comparison to avoid issues with whitespace formatting differences\n const existingOpenMarker = openMatch[0];\n const hasCorrectMarker = existingOpenMarker === openMarker;\n const hasCorrectContent = existingSectionBody === newContent.trim();\n const hasCloseMarker = true; // We already verified closeMatch exists\n\n if (hasCorrectMarker && hasCorrectContent && hasCloseMarker) {\n // Existing section is already correct, return unchanged\n return existingContent;\n }\n\n // Replace the section content\n return beforeSection + openMarker + '\\n' + newContent.trim() + '\\n' + closeMarker + afterSection;\n}\n\n", "/**\n * Flow-Based Index Installer\n * \n * INFRASTRUCTURE LAYER: Workspace index integration\n * \n * Wraps flow-based-installer.ts with workspace index management.\n * This is the primary entry point for production installations.\n * \n * Delegates to flow-based-installer.ts for core flow execution,\n * then updates workspace index with file mappings.\n * \n * Hierarchy:\n * Command Layer -> install-flow.ts -> THIS FILE -> flow-based-installer.ts\n */\n\nimport { join, relative } from 'path';\nimport { promises as fs } from 'fs';\nimport { readLockfile, writeLockfile } from '../../utils/lockfile-yml.js';\nimport type { LockfilePackage } from '../../types/lockfile.js';\nimport { isRegistryPath } from '../source-mutability.js';\nimport {\n installPackageWithFlows,\n type FlowInstallContext\n} from './flow-based-installer.js';\nimport { resolvePackageContentRoot } 
from './local-source-resolution.js';\nimport { logger } from '../../utils/logger.js';\nimport { formatPathForYaml } from '../../utils/path-resolution.js';\nimport { sortMapping } from '../../utils/package-index-yml.js';\nimport { deduplicateTargets } from '../../utils/workspace-index-helpers.js';\nimport {\n readWorkspaceIndex,\n writeWorkspaceIndex,\n type WorkspaceIndexRecord\n} from '../../utils/workspace-index-yml.js';\nimport { exists } from '../../utils/fs.js';\nimport type { Platform } from '../platforms.js';\nimport type { InstallOptions } from '../../types/index.js';\nimport type { WorkspaceIndexFileMapping } from '../../types/workspace-index.js';\nimport {\n buildOwnershipContext,\n type RelocatedFile,\n type OwnershipContext\n} from './conflicts/file-conflict-resolver.js';\nimport { removeStaleFiles } from './stale-file-cleanup.js';\nimport { normalizePathForProcessing } from '../../utils/path-normalization.js';\nimport type { IndexWriteCollector } from './wave-resolver/index-write-collector.js';\nimport type { IndexSourceType } from '../../constants/index.js';\nimport type { InstallScope } from '../../types/workspace-index.js';\nimport { createContextFromFormat } from '../conversion-context/creation.js';\nimport { detectFormatWithContextFromDirectory } from './helpers/format-detection.js';\nimport {\n logConflictMessages,\n logErrorMessages\n} from './helpers/result-logging.js';\nimport {\n collectConflictMessages,\n collectErrorMessages\n} from './helpers/result-aggregation.js';\n\n// ============================================================================\n// Types\n// ============================================================================\n\nexport interface IndexInstallResult {\n installed: number;\n updated: number;\n deleted: number;\n skipped: number;\n files: string[];\n installedFiles: string[];\n updatedFiles: string[];\n deletedFiles: string[];\n /** True when namespace conflict resolution was triggered for this package */\n 
namespaced?: boolean;\n /** The namespace slug used for this package (persisted to workspace index) */\n namespaceSlug?: string;\n /** Paths of files that were actually installed under namespace conflict resolution */\n namespacedFiles?: string[];\n /** Files that were physically relocated on disk during namespace resolution */\n relocatedFiles?: RelocatedFile[];\n /** Absolute paths of files that were auto-claimed (content identical, unowned on disk) */\n claimedFiles?: string[];\n}\n\ninterface PackageIndexRecord {\n path: string;\n packageName: string;\n workspace: {\n version: string;\n hash?: string;\n };\n files: Record<string, (string | WorkspaceIndexFileMapping)[]>;\n}\n\n// ============================================================================\n// Main Installation Function\n// ============================================================================\n\n/**\n * Install a package using flow-based installation\n * This is the new entry point that replaces installPackageByIndex\n */\nexport async function installPackageByIndexWithFlows(\n cwd: string,\n packageName: string,\n version: string,\n platforms: Platform[],\n options: InstallOptions,\n contentRoot?: string,\n packageFormat?: any, // Optional format metadata from plugin transformer\n marketplaceMetadata?: { // Optional marketplace source metadata\n url: string;\n commitSha: string;\n pluginName: string;\n },\n matchedPattern?: string, // Phase 4: Pattern from base detection\n resourceVersion?: string, // Resource-specific version (for agents/skills)\n originalContentRoot?: string, // Original source path before conversion (for index)\n forceOverwrite?: boolean, // Phase 5: Package was confirmed for overwrite at package-level conflict phase\n prompt?: import('../ports/prompt.js').PromptPort,\n indexWriteCollector?: IndexWriteCollector,\n sharedOwnershipContext?: import('./conflicts/file-conflict-resolver.js').OwnershipContext,\n sourceType?: IndexSourceType,\n installScope?: InstallScope\n): 
Promise<IndexInstallResult> {\n logger.debug(`Installing ${packageName}@${version} with flows for platforms: ${platforms.join(', ')}`);\n\n // Resolve package root\n const resolvedContentRoot = contentRoot ?? await resolvePackageContentRoot({\n cwd,\n packageName,\n version\n });\n\n // Aggregate results across all platforms\n const aggregatedResult: IndexInstallResult = {\n installed: 0,\n updated: 0,\n deleted: 0,\n skipped: 0,\n files: [],\n installedFiles: [],\n updatedFiles: [],\n deletedFiles: []\n };\n\n const allTargetPaths = new Set<string>();\n const namespacedTargetPaths: string[] = [];\n let resolvedNamespaceSlug: string | undefined;\n const claimedTargetPaths: string[] = [];\n const allConflicts: string[] = [];\n const allErrors: string[] = [];\n const fileMapping: Record<string, (string | WorkspaceIndexFileMapping)[]> = {};\n \n // Helper to normalize source keys: merge variants that refer to the same file\n // This handles cases where .mcp.json and mcp.jsonc are discovered separately but refer to the same file\n const sourceKeyCache = new Map<string, string>(); // Maps discovered source -> canonical key\n const normalizeSourceKey = async (source: string): Promise<string> => {\n // Check cache first\n if (sourceKeyCache.has(source)) {\n return sourceKeyCache.get(source)!;\n }\n \n const sourceAbs = join(resolvedContentRoot, source);\n if (!await exists(sourceAbs)) {\n sourceKeyCache.set(source, source);\n return source;\n }\n \n // Check if any already-seen source key refers to the same file\n // When both .mcp.json and mcp.jsonc exist and refer to the same file, prefer .mcp.json\n // (since apply command uses .mcp.json, it's the canonical form)\n let canonicalKey = source;\n for (const [cachedSource, cachedCanonical] of sourceKeyCache.entries()) {\n if (cachedSource === source) continue;\n const cachedAbs = join(resolvedContentRoot, cachedSource);\n if (await exists(cachedAbs)) {\n try {\n const sourceStat = await fs.stat(sourceAbs);\n const 
cachedStat = await fs.stat(cachedAbs);\n // Same file if same inode (Unix) or same size+mtime\n if (sourceStat.ino === cachedStat.ino || \n (sourceStat.size === cachedStat.size && \n Math.abs(sourceStat.mtimeMs - cachedStat.mtimeMs) < 1000)) {\n // Prefer dot-prefixed variant (.mcp.json) as canonical key when both exist\n // This matches apply command behavior\n if (source.startsWith('.') && !cachedCanonical.startsWith('.')) {\n canonicalKey = source;\n // Update cached entry to use dot-prefixed variant\n sourceKeyCache.set(cachedSource, source);\n } else if (!source.startsWith('.') && cachedCanonical.startsWith('.')) {\n canonicalKey = cachedCanonical;\n } else {\n canonicalKey = cachedCanonical;\n }\n sourceKeyCache.set(source, canonicalKey);\n return canonicalKey;\n }\n } catch {\n // If stat fails, treat as different files\n }\n }\n }\n \n // No match found, use source as canonical key\n sourceKeyCache.set(source, source);\n return source;\n };\n\n // Snapshot previous file mapping for stale file detection\n let previousIndexRecord: WorkspaceIndexRecord | null = null;\n let previousFiles: Record<string, (string | WorkspaceIndexFileMapping)[]> | null = null;\n if (!options.dryRun) {\n try {\n previousIndexRecord = await readWorkspaceIndex(cwd);\n const packageEntry = previousIndexRecord.index.packages?.[packageName];\n if (packageEntry?.files && Object.keys(packageEntry.files).length > 0) {\n previousFiles = packageEntry.files;\n }\n } catch {\n // Fresh workspace or read failure \u2014 no previous state to diff\n }\n }\n\n // When installing to multiple platforms that share target paths (e.g. 
amp, kimi, replit\n // all use .agents/skills/), build a shared ownership context and track paths written this\n // run so later platforms don't treat earlier writes as \"exists-unowned\" and incorrectly\n // namespace, producing duplicate files.\n let effectiveOwnershipContext: OwnershipContext | undefined;\n if (platforms.length > 1) {\n const ctx = sharedOwnershipContext ?? await buildOwnershipContext(\n cwd, packageName, previousFiles ? { files: previousFiles } : null, previousIndexRecord\n );\n ctx.currentRunWrittenPaths = ctx.currentRunWrittenPaths ?? new Set();\n effectiveOwnershipContext = ctx;\n }\n\n // Execute flows for each platform\n for (const platform of platforms) {\n // Create conversion context from format or detect it using shared utility\n let conversionContext;\n let format = packageFormat;\n \n if (!packageFormat) {\n // Detect format and context from directory\n const result = await detectFormatWithContextFromDirectory(resolvedContentRoot);\n format = result.format;\n conversionContext = result.context;\n } else {\n // Create context from existing format\n conversionContext = createContextFromFormat(packageFormat);\n }\n \n const installContext: FlowInstallContext = {\n packageName,\n packageRoot: resolvedContentRoot,\n workspaceRoot: cwd,\n platform,\n packageVersion: version,\n priority: 0, // Priority is calculated from dependency graph during multi-package installs\n dryRun: options.dryRun ?? false,\n packageFormat: format,\n conversionContext,\n // Phase 4: Pass resource filtering info\n matchedPattern,\n prompt,\n // Parallel install support; use shared context with currentRunWrittenPaths when multi-platform\n sharedOwnershipContext: effectiveOwnershipContext ?? sharedOwnershipContext,\n indexWriteCollector,\n previousIndexRecord: previousIndexRecord ?? undefined\n };\n\n try {\n const result = await installPackageWithFlows(installContext, options, forceOverwrite ?? 
false);\n\n // Aggregate target paths + per-source mapping for workspace index\n const pathsThisPlatform = result.targetPaths ?? [];\n for (const absTarget of pathsThisPlatform) {\n allTargetPaths.add(absTarget);\n // Track workspace-relative paths written this run so later platforms (e.g. kimi, replit\n // after amp) don't treat them as exists-unowned and incorrectly namespace.\n if (effectiveOwnershipContext?.currentRunWrittenPaths) {\n const rel = relative(cwd, absTarget);\n effectiveOwnershipContext.currentRunWrittenPaths.add(normalizePathForProcessing(rel));\n }\n }\n // Merge file mappings with normalization\n for (const [source, targets] of Object.entries(result.fileMapping ?? {})) {\n const normalizedSource = await normalizeSourceKey(source);\n \n fileMapping[normalizedSource] = deduplicateTargets(\n fileMapping[normalizedSource] ?? [], targets\n );\n }\n\n // Aggregate statistics\n aggregatedResult.installed += result.filesProcessed;\n aggregatedResult.updated += 0; // Flow executor doesn't distinguish new vs updated yet\n aggregatedResult.skipped += result.filesProcessed - result.filesWritten;\n\n // Collect conflicts and errors using shared utilities\n collectConflictMessages(allConflicts, result.conflicts);\n collectErrorMessages(allErrors, result.errors);\n\n // Propagate namespace metadata from the strategy result\n if (result.namespaced) {\n namespacedTargetPaths.push(...pathsThisPlatform);\n if (result.namespaceSlug) {\n resolvedNamespaceSlug = result.namespaceSlug;\n }\n }\n if (result.claimedFiles && result.claimedFiles.length > 0) {\n claimedTargetPaths.push(...result.claimedFiles.map(rel => join(cwd, rel)));\n }\n if (result.relocatedFiles && result.relocatedFiles.length > 0) {\n if (!aggregatedResult.relocatedFiles) {\n aggregatedResult.relocatedFiles = [];\n }\n aggregatedResult.relocatedFiles.push(...result.relocatedFiles);\n }\n\n // Log results per platform\n if (result.filesProcessed > 0) {\n logger.info(\n `${platform}: processed 
${result.filesProcessed} files` +\n (options.dryRun ? ' (dry run)' : `, wrote ${result.filesWritten} files`)\n );\n }\n\n } catch (error) {\n logger.error(`Failed to install ${packageName} for platform ${platform}: ${error}`);\n allErrors.push(`${platform}: ${error}`);\n }\n }\n\n // Log aggregated results using shared utilities\n logConflictMessages(allConflicts);\n logErrorMessages(allErrors);\n\n // Remove stale files from previous installation\n if (previousFiles && !options.dryRun) {\n const staleResult = await removeStaleFiles({\n cwd,\n packageName,\n previousFiles,\n newFileMapping: fileMapping,\n platforms,\n dryRun: false,\n matchedPattern,\n ownershipContext: effectiveOwnershipContext ?? sharedOwnershipContext,\n });\n\n aggregatedResult.deleted += staleResult.deleted.length;\n aggregatedResult.deletedFiles.push(...staleResult.deleted);\n\n if (staleResult.deleted.length > 0 || staleResult.updated.length > 0) {\n logger.info(\n `Stale cleanup for ${packageName}: removed ${staleResult.deleted.length} files, ` +\n `updated ${staleResult.updated.length} merged files`\n );\n }\n }\n\n // Update workspace index if not dry-run\n if (!options.dryRun) {\n // For resource-scoped installs, store the most specific source path possible:\n // - file match: <root>/<file>\n // - directory match: <root>/<dir> (matchedPattern like \"dir/**\")\n // This keeps index paths and names aligned for resource installs.\n const isGlob = Boolean(matchedPattern && (matchedPattern.includes('*') || matchedPattern.includes('?') || matchedPattern.includes('[')));\n const isDirGlob = Boolean(matchedPattern && matchedPattern.replace(/\\\\/g, '/').endsWith('/**'));\n const dirGlobPrefix = isDirGlob\n ? 
matchedPattern!.replace(/\\\\/g, '/').replace(/\\/\\*\\*$/, '')\n : undefined;\n \n // Use originalContentRoot if provided (for converted packages), otherwise use resolvedContentRoot\n const basePathForIndex = originalContentRoot || resolvedContentRoot;\n \n const indexSourcePath = (matchedPattern && !isGlob)\n ? join(basePathForIndex, matchedPattern)\n : (dirGlobPrefix && dirGlobPrefix.length > 0)\n ? join(basePathForIndex, dirGlobPrefix)\n : basePathForIndex;\n\n await updateWorkspaceIndexForFlows(\n cwd,\n packageName,\n version,\n indexSourcePath,\n fileMapping,\n marketplaceMetadata,\n resourceVersion,\n indexWriteCollector,\n platforms,\n resolvedNamespaceSlug,\n sourceType,\n installScope\n );\n }\n\n // Set result files\n aggregatedResult.files = Array.from(allTargetPaths);\n aggregatedResult.installedFiles = Array.from(allTargetPaths);\n if (namespacedTargetPaths.length > 0) {\n aggregatedResult.namespaced = true;\n aggregatedResult.namespaceSlug = resolvedNamespaceSlug;\n aggregatedResult.namespacedFiles = namespacedTargetPaths;\n }\n if (claimedTargetPaths.length > 0) {\n aggregatedResult.claimedFiles = claimedTargetPaths;\n }\n\n return aggregatedResult;\n}\n\n// ============================================================================\n// Workspace Index Management\n// ============================================================================\n\n/**\n * Update workspace index with flow-based installation results\n */\nasync function updateWorkspaceIndexForFlows(\n cwd: string,\n packageName: string,\n version: string,\n packagePath: string,\n fileMapping: Record<string, (string | WorkspaceIndexFileMapping)[]>,\n marketplaceMetadata?: {\n url: string;\n commitSha: string;\n pluginName: string;\n },\n resourceVersion?: string,\n indexWriteCollector?: IndexWriteCollector,\n platforms?: Platform[],\n namespaceSlug?: string,\n sourceType?: IndexSourceType,\n installScope?: InstallScope\n): Promise<void> {\n const effectiveVersion = resourceVersion ?? 
version;\n\n // Defer to collector if present (parallel install mode)\n if (indexWriteCollector) {\n indexWriteCollector.recordPackageUpdate({\n packageName,\n path: packagePath,\n version: effectiveVersion,\n files: fileMapping,\n marketplace: marketplaceMetadata,\n platforms,\n namespace: namespaceSlug,\n sourceType,\n installScope,\n });\n return;\n }\n\n try {\n const wsRecord = await readWorkspaceIndex(cwd);\n \n // Initialize packages map if needed\n wsRecord.index.packages = wsRecord.index.packages ?? {};\n \n // Convert file mapping to workspace index format\n const files: Record<string, (string | WorkspaceIndexFileMapping)[]> = {};\n for (const [source, targets] of Object.entries(fileMapping)) {\n files[source] = targets;\n }\n \n // Convert to workspace-relative path if under workspace, then apply tilde notation for global paths\n const formattedPath = formatPathForYaml(packagePath, cwd);\n \n // Apply version fallback chain: resourceVersion > version > undefined\n // This ensures agents/skills can have individual versions\n const effectiveVersion = resourceVersion ?? 
version;\n \n // Update package entry\n const packageEntry: any = {\n ...wsRecord.index.packages[packageName],\n path: formattedPath,\n version: effectiveVersion,\n files: sortMapping(files)\n };\n \n // Add namespace slug if present\n if (namespaceSlug) {\n packageEntry.namespace = namespaceSlug;\n }\n\n // Add platforms if present\n if (platforms && platforms.length > 0) {\n packageEntry.platforms = platforms;\n }\n\n // Add marketplace metadata if present\n if (marketplaceMetadata) {\n packageEntry.marketplace = marketplaceMetadata;\n }\n\n // Add source type if present\n if (sourceType) {\n packageEntry.sourceType = sourceType;\n }\n\n // Add install scope if present\n if (installScope) {\n packageEntry.installScope = installScope;\n }\n\n wsRecord.index.packages[packageName] = packageEntry;\n \n await writeWorkspaceIndex(wsRecord);\n logger.debug(`Updated workspace index for ${packageName}@${version}`);\n\n // Write resolution metadata to lockfile (best-effort)\n try {\n const lockRecord = await readLockfile(cwd);\n const lockEntry: LockfilePackage = {\n version: effectiveVersion,\n dependencies: packageEntry.dependencies,\n marketplace: marketplaceMetadata,\n };\n // Add source provenance for non-registry packages\n if (packagePath && !isRegistryPath(packagePath)) {\n lockEntry.base = packagePath;\n }\n lockRecord.lockfile.packages[packageName] = lockEntry;\n await writeLockfile(lockRecord);\n } catch (lockError) {\n logger.debug(`Failed to update lockfile for ${packageName}: ${lockError}`);\n }\n } catch (error) {\n logger.warn(`Failed to update workspace index for ${packageName}: ${error}`);\n }\n}\n\n\n// ============================================================================\n// Helper Functions\n// ============================================================================\n\n/**\n * Read package index from workspace\n */\nasync function readPackageIndex(\n cwd: string,\n packageName: string\n): Promise<PackageIndexRecord | null> {\n const record = 
await readWorkspaceIndex(cwd);\n const entry = record.index.packages?.[packageName];\n if (!entry) return null;\n \n return {\n path: entry.path ?? '',\n packageName,\n workspace: {\n version: entry.version ?? '',\n hash: undefined\n },\n files: entry.files ?? {}\n };\n}\n\n/**\n * Write package index to workspace\n */\nasync function writePackageIndex(\n record: PackageIndexRecord,\n cwd: string\n): Promise<void> {\n const wsRecord = await readWorkspaceIndex(cwd);\n wsRecord.index.packages = wsRecord.index.packages ?? {};\n \n const entry = wsRecord.index.packages[record.packageName];\n const rawPath = entry?.path ?? record.path ?? '';\n \n if (!rawPath) {\n logger.warn(\n `Skipping workspace index write for ${record.packageName}: source path is unknown`\n );\n return;\n }\n \n // Convert to workspace-relative path if under workspace, then apply tilde notation for global paths\n const pathToUse = formatPathForYaml(rawPath, cwd);\n \n wsRecord.index.packages[record.packageName] = {\n ...entry,\n path: pathToUse,\n version: entry?.version ?? record.workspace?.version,\n files: sortMapping(record.files ?? 
{})\n };\n \n await writeWorkspaceIndex(wsRecord);\n}\n", "/**\n * Lockfile I/O utilities.\n *\n * Follows the same patterns as workspace-index-yml.ts:\n * - Atomic write via temp file + rename\n * - Defensive parsing with sanitization\n * - Graceful handling of absent files\n */\n\nimport { promises as fs } from 'fs';\nimport { dirname, join, resolve as pathResolve } from 'path';\nimport * as yaml from 'js-yaml';\nimport { FILE_PATTERNS } from '../constants/index.js';\nimport { getLocalOpenPackageDir } from './paths.js';\nimport { ensureDir, readTextFile } from './fs.js';\nimport { FileSystemError } from './errors.js';\nimport { logger } from './logger.js';\nimport { sortAndDedupeStrings } from './collections.js';\nimport type { Lockfile, LockfilePackage } from '../types/lockfile.js';\n\nconst CURRENT_LOCKFILE_VERSION = 1;\nconst LOCKFILE_HEADER = '# This file is auto-generated by opkg. Do not edit manually.';\n\nexport interface LockfileRecord {\n path: string;\n lockfile: Lockfile;\n}\n\nfunction emptyLockfile(): Lockfile {\n return { lockfileVersion: CURRENT_LOCKFILE_VERSION, packages: {} };\n}\n\nexport function getLockfilePath(targetDir: string): string {\n return join(getLocalOpenPackageDir(targetDir), FILE_PATTERNS.OPENPACKAGE_LOCK);\n}\n\nfunction sanitizeLockfilePackage(entry: unknown): LockfilePackage | null {\n if (typeof entry !== 'object' || entry === null) return null;\n\n const raw = entry as Record<string, unknown>;\n const pkg: LockfilePackage = {};\n\n if (typeof raw.version === 'string' && raw.version.trim().length > 0) {\n pkg.version = raw.version.trim();\n }\n\n if (Array.isArray(raw.dependencies)) {\n pkg.dependencies = raw.dependencies.filter(\n (d): d is string => typeof d === 'string' && d.trim().length > 0\n );\n }\n\n if (typeof raw.base === 'string' && raw.base.trim().length > 0) {\n pkg.base = raw.base.trim();\n }\n\n if (typeof raw.path === 'string' && raw.path.trim().length > 0) {\n pkg.path = raw.path.trim();\n }\n\n if (typeof 
raw.url === 'string' && raw.url.trim().length > 0) {\n pkg.url = raw.url.trim();\n }\n\n if (typeof raw.ref === 'string' && raw.ref.trim().length > 0) {\n pkg.ref = raw.ref.trim();\n }\n\n if (typeof raw.marketplace === 'object' && raw.marketplace !== null) {\n const m = raw.marketplace as Record<string, unknown>;\n if (typeof m.url === 'string' && typeof m.commitSha === 'string' && typeof m.pluginName === 'string') {\n pkg.marketplace = { url: m.url, commitSha: m.commitSha, pluginName: m.pluginName };\n }\n }\n\n return pkg;\n}\n\nfunction sanitizeLockfileData(data: unknown): Lockfile | null {\n if (typeof data !== 'object' || data === null) return null;\n\n const raw = data as Record<string, unknown>;\n\n if (raw.lockfileVersion !== undefined && raw.lockfileVersion !== CURRENT_LOCKFILE_VERSION) {\n logger.warn(`Unsupported lockfile version: ${raw.lockfileVersion} (expected ${CURRENT_LOCKFILE_VERSION})`);\n return null;\n }\n\n const packagesSection = raw.packages;\n if (typeof packagesSection !== 'object' || packagesSection === null) {\n return emptyLockfile();\n }\n\n const packages: Record<string, LockfilePackage> = {};\n for (const [pkgName, pkgEntry] of Object.entries(packagesSection as Record<string, unknown>)) {\n if (typeof pkgName !== 'string' || pkgName.trim().length === 0) continue;\n const sanitized = sanitizeLockfilePackage(pkgEntry);\n if (sanitized) {\n packages[pkgName] = sanitized;\n }\n }\n\n return { lockfileVersion: CURRENT_LOCKFILE_VERSION, packages };\n}\n\n// Module-level cache keyed by resolved targetDir\nconst _lockfileCache = new Map<string, LockfileRecord>();\n\nexport function invalidateLockfileCache(targetDir?: string): void {\n if (targetDir) {\n _lockfileCache.delete(pathResolve(targetDir));\n } else {\n _lockfileCache.clear();\n }\n}\n\n/**\n * Read the lockfile for a workspace.\n * Results are cached per targetDir. 
Call invalidateLockfileCache() after writes.\n */\nexport async function readLockfile(targetDir: string): Promise<LockfileRecord> {\n const resolvedDir = pathResolve(targetDir);\n const cached = _lockfileCache.get(resolvedDir);\n if (cached) return cached;\n\n const lockfilePath = getLockfilePath(targetDir);\n\n try {\n const content = await readTextFile(lockfilePath);\n const parsed = yaml.load(content) as unknown;\n const sanitized = sanitizeLockfileData(parsed);\n if (!sanitized) {\n logger.warn(`Invalid lockfile detected at ${lockfilePath}, returning empty.`);\n const empty = { path: lockfilePath, lockfile: emptyLockfile() };\n _lockfileCache.set(resolvedDir, empty);\n return empty;\n }\n const result = { path: lockfilePath, lockfile: sanitized };\n _lockfileCache.set(resolvedDir, result);\n return result;\n } catch (error: any) {\n if (error?.cause?.code !== 'ENOENT' && error?.code !== 'ENOENT') {\n logger.warn(`Failed to read lockfile at ${lockfilePath}: ${error}`);\n }\n const empty = { path: lockfilePath, lockfile: emptyLockfile() };\n _lockfileCache.set(resolvedDir, empty);\n return empty;\n }\n}\n\nexport async function writeLockfile(record: LockfileRecord): Promise<void> {\n const lockfilePath = record.path;\n const packages = record.lockfile.packages ?? 
{};\n\n const outputPackages: Record<string, LockfilePackage> = {};\n for (const pkgName of Object.keys(packages)) {\n const pkg = packages[pkgName];\n const out: LockfilePackage = {};\n\n if (pkg.version) out.version = pkg.version;\n if (pkg.dependencies && pkg.dependencies.length > 0) {\n out.dependencies = sortAndDedupeStrings(pkg.dependencies);\n }\n if (pkg.marketplace) out.marketplace = pkg.marketplace;\n if (pkg.base) out.base = pkg.base;\n if (pkg.path) out.path = pkg.path;\n if (pkg.url) out.url = pkg.url;\n if (pkg.ref) out.ref = pkg.ref;\n\n outputPackages[pkgName] = out;\n }\n\n await ensureDir(dirname(lockfilePath));\n\n const body = yaml.dump(\n { lockfileVersion: CURRENT_LOCKFILE_VERSION, packages: outputPackages },\n { lineWidth: 120, sortKeys: true }\n );\n\n const serialized = `${LOCKFILE_HEADER}\\n\\n${body}`;\n const tempPath = `${lockfilePath}.tmp`;\n try {\n await fs.writeFile(tempPath, serialized, 'utf8');\n await fs.rename(tempPath, lockfilePath);\n // Cache is keyed by targetDir (parent of .openpackage/); invalidate correctly\n invalidateLockfileCache(dirname(dirname(lockfilePath)));\n } catch (error) {\n try { await fs.unlink(tempPath); } catch { /* ignore cleanup error */ }\n throw new FileSystemError(`Failed to write lockfile: ${lockfilePath}`, { path: lockfilePath, error });\n }\n}\n\nexport async function removeLockfileEntry(targetDir: string, packageName: string): Promise<void> {\n const record = await readLockfile(targetDir);\n if (!record.lockfile.packages[packageName]) return;\n\n delete record.lockfile.packages[packageName];\n\n // Clean stale references from other packages' dependency lists\n for (const entry of Object.values(record.lockfile.packages)) {\n if (entry.dependencies) {\n entry.dependencies = entry.dependencies.filter(d => d !== packageName);\n if (entry.dependencies.length === 0) delete entry.dependencies;\n }\n }\n\n await writeLockfile(record);\n}\n", "/**\n * Shared collection utilities.\n */\n\n/** Deduplicate 
and sort a string array. */\nexport function sortAndDedupeStrings(values: string[]): string[] {\n return Array.from(new Set(values)).sort();\n}\n", "/**\n * Format Conversion Installation Strategy\n * \n * Converts package from source format \u2192 universal \u2192 target platform format.\n * \n * This strategy performs per-platform conversion to handle conditional flows\n * that depend on the target platform (e.g., `when: { \"$eq\": [\"$$platform\", \"claude\"] }`).\n * \n * Each target platform gets its own conversion pass with proper context variables:\n * - $$platform = target platform (for conditional evaluation)\n * - $$source = original source format (preserved through conversion)\n * \n * Used when source platform \u2260 target platform.\n */\n\nimport { join, relative } from 'path';\nimport type { Platform } from '../../platforms.js';\nimport type { Package } from '../../../types/index.js';\nimport type { PackageConversionContext } from '../../../types/conversion-context.js';\nimport type { PackageFormat } from '../format-detector.js';\nimport type { InstallOptions } from '../../../types/index.js';\nimport type { FlowInstallContext, FlowInstallResult } from './types.js';\nimport { BaseStrategy } from './base-strategy.js';\nimport { needsConversion, detectPackageFormat } from '../format-detector.js';\nimport { createPlatformConverter } from '../../flows/platform-converter.js';\nimport { walkFiles } from '../../../utils/file-walker.js';\nimport { readTextFile } from '../../../utils/fs.js';\nimport { logger } from '../../../utils/logger.js';\nimport {\n createTempPackageDirectory,\n writeTempPackageFiles,\n writeConversionContext,\n cleanupTempDirectory\n} from './helpers/temp-directory.js';\nimport { createContextFromFormat } from '../../conversion-context/index.js';\nimport { FlowBasedInstallStrategy } from './flow-based-strategy.js';\nimport { normalizeConvertedMatchedPattern } from '../resource-pattern-normalization.js';\n\n/**\n * Format Conversion 
Installation Strategy\n * \n * Performs per-platform conversion to ensure conditional flows have\n * correct context variables during transformation.\n */\nexport class ConversionInstallStrategy extends BaseStrategy {\n readonly name = 'conversion';\n \n canHandle(format: PackageFormat, platform: Platform): boolean {\n return needsConversion(format, platform);\n }\n \n async install(\n context: FlowInstallContext,\n options?: InstallOptions,\n forceOverwrite: boolean = false\n ): Promise<FlowInstallResult> {\n const { packageName, packageRoot, workspaceRoot, platform, dryRun } = context;\n \n this.logStrategySelection(context);\n \n logger.info(`Converting ${packageName} from ${context.packageFormat?.platform || 'unknown'} to ${platform} format`);\n \n try {\n // Phase 1: Load package files\n const packageFiles = await this.loadPackageFiles(packageRoot);\n \n // Phase 2: Create package object with original format metadata\n const pkg: Package = {\n metadata: {\n name: packageName,\n version: context.packageVersion\n },\n files: packageFiles,\n _format: context.packageFormat || await this.detectFormat(packageRoot)\n };\n \n // Phase 3: Create conversion context and convert FOR the specific target platform\n // This ensures conditional flows like `when: { \"$eq\": [\"$$platform\", \"claude\"] }`\n // have the correct context during conversion\n const conversionContext = createContextFromFormat(pkg._format!);\n \n const converter = createPlatformConverter(workspaceRoot);\n const conversionResult = await converter.convert(\n pkg,\n conversionContext, // Pass conversion context\n platform, // Target platform\n { dryRun }\n );\n \n if (!conversionResult.success || !conversionResult.convertedPackage) {\n logger.error('Package conversion failed', {\n package: packageName,\n stages: conversionResult.stages\n });\n \n return this.createErrorResult(\n context,\n new Error('Conversion failed'),\n 'Failed to convert package format'\n );\n }\n \n logger.info(\n `Conversion to 
universal format complete (${conversionResult.stages.length} stages), ` +\n `now applying ${platform} platform flows`\n );\n \n // Phase 4: Write converted files to temp directory and install\n return await this.installConvertedPackage(\n conversionResult.convertedPackage,\n conversionResult.updatedContext || conversionContext,\n context,\n options,\n forceOverwrite\n );\n \n } catch (error) {\n logger.error('Conversion installation failed', { packageName, error });\n return this.createErrorResult(\n context,\n error as Error,\n `Failed to install with conversion: ${(error as Error).message}`\n );\n }\n }\n \n /**\n * Load all package files from directory\n */\n private async loadPackageFiles(packageRoot: string): Promise<Array<{ path: string; content: string }>> {\n const packageFiles: Array<{ path: string; content: string }> = [];\n \n for await (const sourcePath of walkFiles(packageRoot)) {\n const relativePath = relative(packageRoot, sourcePath);\n \n if (relativePath.startsWith('.openpackage/') || relativePath === 'openpackage.yml') {\n continue;\n }\n \n const content = await readTextFile(sourcePath);\n packageFiles.push({ path: relativePath, content, encoding: 'utf8' } as any);\n }\n \n return packageFiles;\n }\n \n /**\n * Detect package format from directory\n */\n private async detectFormat(packageRoot: string): Promise<PackageFormat> {\n const files: Array<{ path: string; content: string }> = [];\n \n for await (const fullPath of walkFiles(packageRoot)) {\n const relativePath = relative(packageRoot, fullPath);\n \n if (relativePath.startsWith('.git/') || relativePath === '.git') {\n continue;\n }\n \n files.push({ path: relativePath, content: '' });\n }\n \n return detectPackageFormat(files);\n }\n \n /**\n * Install converted package from temp directory with context\n */\n private async installConvertedPackage(\n convertedPackage: Package,\n conversionContext: PackageConversionContext,\n context: FlowInstallContext,\n options?: InstallOptions,\n 
forceOverwrite: boolean = false\n ): Promise<FlowInstallResult> {\n let tempPackageRoot: string | null = null;\n \n try {\n // Create temp directory\n tempPackageRoot = await createTempPackageDirectory();\n \n // Write converted files\n await writeTempPackageFiles(convertedPackage.files, tempPackageRoot);\n \n // Write conversion context (persists through temp directory)\n await writeConversionContext(conversionContext, tempPackageRoot);\n \n // Install from temp directory using flow-based installation\n const flowStrategy = new FlowBasedInstallStrategy();\n \n const convertedContext: FlowInstallContext = {\n ...context,\n packageRoot: tempPackageRoot,\n // Updated package format after conversion\n packageFormat: convertedPackage._format,\n // Pass updated conversion context\n conversionContext,\n matchedPattern: normalizeConvertedMatchedPattern(context.matchedPattern)\n };\n \n const installResult = await flowStrategy.install(convertedContext, options, forceOverwrite);\n \n // Cleanup temp directory\n await cleanupTempDirectory(tempPackageRoot);\n \n return installResult;\n \n } catch (error) {\n await cleanupTempDirectory(tempPackageRoot);\n \n logger.error('Failed to install converted package', { \n packageName: context.packageName, \n error \n });\n \n return this.createErrorResult(\n context,\n error as Error,\n `Failed to install converted package: ${(error as Error).message}`\n );\n }\n }\n}\n", "/**\n * Result Converter Module\n * \n * Utilities for converting execution results to installation results.\n */\n\nimport type { Platform } from '../../../platforms.js';\nimport type { FlowInstallResult } from '../types.js';\nimport { logger } from '../../../../utils/logger.js';\n\n/**\n * Convert execution result from flow coordinator to installation result format\n * \n * @param executionResult - Result from flow execution coordinator\n * @param packageName - Name of package being installed\n * @param platform - Target platform\n * @param dryRun - Whether this 
is a dry run\n * @returns Formatted installation result\n */\nexport function convertToInstallResult(\n executionResult: any,\n packageName: string,\n platform: Platform,\n dryRun: boolean\n): FlowInstallResult {\n const result: FlowInstallResult = {\n success: executionResult.success,\n filesProcessed: executionResult.filesProcessed,\n filesWritten: executionResult.filesWritten,\n conflicts: executionResult.conflicts.map((c: any) => ({\n targetPath: c.path,\n packages: [\n { packageName: c.winner, priority: 0, chosen: true },\n ...c.losers.map((loser: string) => ({\n packageName: loser,\n priority: 0,\n chosen: false\n }))\n ],\n message: `Conflict in ${c.path}: ${c.winner} overwrites ${c.losers.join(', ')}`\n })),\n errors: executionResult.errors,\n targetPaths: executionResult.targetPaths,\n fileMapping: executionResult.fileMapping\n };\n \n // Log results\n if (result.filesProcessed > 0) {\n logger.info(\n `Processed ${result.filesProcessed} files for ${packageName} on platform ${platform}` +\n (dryRun ? 
' (dry run)' : `, wrote ${result.filesWritten} files`)\n );\n }\n \n return result;\n}\n\n/**\n * Create an empty install result (for early returns)\n */\nexport function createEmptyResult(): FlowInstallResult {\n return {\n success: true,\n filesProcessed: 0,\n filesWritten: 0,\n conflicts: [],\n errors: [],\n targetPaths: [],\n fileMapping: {},\n namespaced: false,\n relocatedFiles: []\n };\n}\n", "/**\n * Flow Helpers Module\n * \n * Utilities for retrieving and manipulating flows for installation strategies.\n */\n\nimport type { Flow } from '../../../../types/flows.js';\nimport type { Platform } from '../../../platforms.js';\nimport { getPlatformDefinition, getGlobalExportFlows } from '../../../platforms.js';\n\n/**\n * Get applicable flows for a platform, including global flows\n */\nexport function getApplicableFlows(platform: Platform, cwd: string): Flow[] {\n const flows: Flow[] = [];\n \n const globalExportFlows = getGlobalExportFlows(cwd);\n if (globalExportFlows && globalExportFlows.length > 0) {\n flows.push(...globalExportFlows);\n }\n \n const definition = getPlatformDefinition(platform, cwd);\n if (definition.export && definition.export.length > 0) {\n flows.push(...definition.export);\n }\n \n return flows;\n}\n", "/**\n * Result Logging Helpers\n * \n * Shared utilities for logging installation results, conflicts, and errors.\n * Provides consistent logging behavior across installation modules.\n */\n\nimport { logger } from '../../../utils/logger.js';\nimport { toTildePath } from '../../../utils/path-resolution.js';\nimport type { FlowInstallResult, FlowConflictReport, FlowInstallError } from '../strategies/types.js';\n\n/**\n * Log conflicts from FlowInstallResult\n * \n * @param conflicts - Array of conflict reports\n */\nexport function logConflicts(conflicts: FlowConflictReport[]): void {\n if (conflicts.length === 0) return;\n \n logger.warn(`Detected ${conflicts.length} conflicts during installation`);\n for (const conflict of conflicts) 
{\n const winner = conflict.packages.find(p => p.chosen);\n const loser = conflict.packages.find(p => !p.chosen);\n logger.warn(\n ` ${toTildePath(conflict.targetPath)}: ${winner?.packageName} (priority ${winner?.priority}) overwrites ${loser?.packageName}`\n );\n }\n}\n\n/**\n * Log errors from FlowInstallResult\n * \n * @param errors - Array of installation errors\n */\nexport function logErrors(errors: FlowInstallError[]): void {\n if (errors.length === 0) return;\n \n logger.error(`Encountered ${errors.length} errors during installation`);\n for (const error of errors) {\n logger.error(` ${error.sourcePath}: ${error.message}`);\n }\n}\n\n/**\n * Log conflict messages (string array format)\n * \n * Used by flow-index-installer which aggregates conflicts across platforms.\n * \n * @param conflicts - Array of conflict message strings\n */\nexport function logConflictMessages(conflicts: string[]): void {\n if (conflicts.length === 0) return;\n \n logger.warn(`Detected ${conflicts.length} conflicts during installation:`);\n for (const conflict of conflicts) {\n logger.warn(` ${conflict}`);\n }\n}\n\n/**\n * Log error messages (string array format)\n * \n * Used by flow-index-installer which aggregates errors across platforms.\n * \n * @param errors - Array of error message strings\n */\nexport function logErrorMessages(errors: string[]): void {\n if (errors.length === 0) return;\n \n logger.error(`Encountered ${errors.length} errors during installation:`);\n for (const error of errors) {\n logger.error(` ${error}`);\n }\n}\n\n/**\n * Log complete installation result with processing summary\n * \n * Logs files processed/written and then delegates to conflict/error logging.\n * \n * @param result - Flow installation result\n * @param packageName - Package being installed\n * @param platform - Target platform\n * @param dryRun - Whether this is a dry run\n */\nexport function logInstallationResult(\n result: FlowInstallResult,\n packageName: string,\n platform: 
string,\n dryRun: boolean\n): void {\n if (result.filesProcessed > 0) {\n logger.info(\n `Processed ${result.filesProcessed} files for ${packageName} on platform ${platform}` +\n (dryRun ? ' (dry run)' : `, wrote ${result.filesWritten} files`)\n );\n }\n \n logConflicts(result.conflicts);\n logErrors(result.errors);\n}\n", "/**\n * Flow Execution Coordinator\n * \n * Coordinates flow execution for discovered source files.\n * Handles target path resolution, context building, and result aggregation.\n */\n\nimport { join, dirname, basename, relative, extname } from 'path';\nimport type { Flow, FlowContext, FlowResult } from '../../types/flows.js';\nimport type { FlowExecutor } from '../../types/flows.js';\nimport type { Platform } from '../platforms.js';\nimport type { WorkspaceIndexFileMapping } from '../../types/workspace-index.js';\nimport { createFlowExecutor } from './flow-executor.js';\nimport { \n resolvePattern, \n extractCapturedName, \n getFirstFromPattern \n} from './flow-source-discovery.js';\nimport { resolveRecursiveGlobTargetRelativePath } from '../glob-target-mapping.js';\nimport { logger } from '../../utils/logger.js';\nimport { stripPlatformSuffixFromFilename } from './platform-suffix-handler.js';\nimport { resolveSwitchExpression } from './switch-resolver.js';\nimport { extractToPatternString } from './to-pattern-extractor.js';\nimport { normalizePathForProcessing } from '../../utils/path-normalization.js';\nimport { deriveResourceLeafFromPackageName } from '../../utils/plugin-naming.js';\nimport { deduplicateTargets } from '../../utils/workspace-index-helpers.js';\n\n/**\n * Execution result with enhanced metadata\n */\nexport interface ExecutionResult {\n success: boolean;\n filesProcessed: number;\n filesWritten: number;\n targetPaths: string[];\n fileMapping: Record<string, (string | WorkspaceIndexFileMapping)[]>;\n conflicts: Array<{\n path: string;\n winner: string;\n losers: string[];\n }>;\n errors: Array<{\n flow: Flow;\n sourcePath: 
string;\n error: Error;\n message: string;\n }>;\n warnings: string[];\n}\n\n/**\n * Execution context for coordinating flow execution\n */\nexport interface CoordinatorContext extends FlowContext {\n /**\n * Flow executor instance to use\n */\n executor?: FlowExecutor;\n}\n\n/**\n * Execute flows for discovered source files\n * \n * @param flowSources - Map of flows to source files\n * @param context - Execution context\n * @returns Execution result with aggregated metrics\n */\nexport async function executeFlowsForSources(\n flowSources: Map<Flow, string[]>,\n context: CoordinatorContext\n): Promise<ExecutionResult> {\n const result: ExecutionResult = {\n success: true,\n filesProcessed: 0,\n filesWritten: 0,\n targetPaths: [],\n fileMapping: {},\n conflicts: [],\n errors: [],\n warnings: []\n };\n\n const executor = context.executor || createFlowExecutor();\n \n for (const [flow, sources] of flowSources) {\n for (const sourceRel of sources) {\n try {\n const sourceResult = await processSourceFile(\n flow,\n sourceRel,\n context,\n executor\n );\n \n // Aggregate results\n if (sourceResult.processed) {\n result.filesProcessed++;\n }\n \n if (sourceResult.written && !context.dryRun) {\n result.filesWritten++;\n }\n \n if (sourceResult.targetPath) {\n result.targetPaths.push(sourceResult.targetPath);\n }\n \n if (sourceResult.fileMapping) {\n const key = sourceResult.mappingKey || sourceRel;\n if (!result.fileMapping[key]) {\n result.fileMapping[key] = [];\n }\n result.fileMapping[key] = deduplicateTargets(\n result.fileMapping[key], sourceResult.fileMapping\n );\n }\n \n if (sourceResult.conflicts) {\n result.conflicts.push(...sourceResult.conflicts);\n }\n\n if (sourceResult.warnings) {\n result.warnings.push(...sourceResult.warnings);\n }\n\n if (!sourceResult.success) {\n result.success = false;\n if (sourceResult.error) {\n result.errors.push({\n flow,\n sourcePath: sourceRel,\n error: sourceResult.error,\n message: sourceResult.error.message\n });\n }\n }\n 
\n } catch (error) {\n result.success = false;\n result.errors.push({\n flow,\n sourcePath: sourceRel,\n error: error as Error,\n message: `Error processing ${sourceRel}: ${(error as Error).message}`\n });\n }\n }\n }\n \n return result;\n}\n\n/**\n * Result from processing a single source file\n */\ninterface SourceProcessingResult {\n success: boolean;\n processed: boolean;\n written: boolean;\n targetPath?: string;\n fileMapping?: (string | WorkspaceIndexFileMapping)[];\n mappingKey?: string;\n conflicts?: Array<{\n path: string;\n winner: string;\n losers: string[];\n }>;\n error?: Error;\n warnings?: string[];\n}\n\n/**\n * Process a single source file through a flow\n *\n * @param flow - Flow to execute\n * @param sourceRel - Source file path (relative to package root)\n * @param context - Execution context\n * @param executor - Flow executor instance\n * @returns Processing result\n */\nasync function processSourceFile(\n flow: Flow,\n sourceRel: string,\n context: CoordinatorContext,\n executor: FlowExecutor\n): Promise<SourceProcessingResult> {\n const sourceAbs = join(context.packageRoot, sourceRel);\n\n // Extract captured name from pattern\n const firstPattern = getFirstFromPattern(flow.from);\n const capturedName = extractCapturedName(sourceRel, firstPattern);\n \n // Build source-specific context\n const sourceContext: FlowContext = {\n ...context,\n variables: {\n ...context.variables,\n sourcePath: sourceRel,\n sourceDir: dirname(sourceRel),\n sourceFile: basename(sourceRel),\n ...(capturedName ? { capturedName } : {})\n }\n };\n \n const rawToPattern = extractToPatternString(\n flow.to,\n (sw) => resolveSwitchExpression(sw, sourceContext)\n ) ?? 
'';\n const resolvedToPattern = resolvePattern(rawToPattern, sourceContext, capturedName);\n const targetAbs = resolveTargetFromGlob(\n sourceAbs,\n firstPattern,\n resolvedToPattern,\n sourceContext\n );\n const targetRel = relative(context.workspaceRoot, targetAbs);\n // Apply namespace remap if present (prefix-based namespacing)\n const targetRelNorm = normalizePathForProcessing(targetRel);\n const remappedRel = context.targetPathRemap?.get(targetRelNorm);\n const finalTargetRel = remappedRel ?? targetRelNorm;\n\n // Extract source schema path from original flow (FlowPatternValue has .schema)\n let sourceSchemaPath: string | undefined;\n const originalFrom = flow.from;\n if (typeof originalFrom === 'object' && originalFrom !== null && !('$switch' in originalFrom)) {\n if (Array.isArray(originalFrom)) {\n const first = originalFrom[0];\n if (typeof first === 'object' && first !== null && 'schema' in first) {\n sourceSchemaPath = (first as { schema?: string }).schema;\n }\n } else if ('schema' in originalFrom) {\n sourceSchemaPath = (originalFrom as { schema?: string }).schema;\n }\n }\n\n // Create concrete flow with resolved paths\n const concreteFlow: Flow = {\n ...flow,\n from: sourceRel,\n to: finalTargetRel,\n ...(sourceSchemaPath ? { sourceSchema: sourceSchemaPath } : {}),\n };\n \n // Execute flow\n const flowResult = await executor.executeFlow(concreteFlow, sourceContext);\n \n // Check if flow was skipped\n const wasSkipped = flowResult.warnings?.includes('Flow skipped due to condition');\n \n if (wasSkipped) {\n return {\n success: true,\n processed: false,\n written: false\n };\n }\n \n if (!flowResult.success) {\n return {\n success: false,\n processed: true,\n written: false,\n error: flowResult.error || new Error('Flow execution failed')\n };\n }\n \n // Process successful result\n const target = typeof flowResult.target === 'string' \n ? 
flowResult.target \n : (flowResult.target as any);\n \n if (typeof target !== 'string') {\n return {\n success: true,\n processed: true,\n written: false\n };\n }\n \n const targetRelFromWorkspace = relative(context.workspaceRoot, target);\n const normalizedTargetRel = targetRelFromWorkspace.replace(/\\\\/g, '/');\n \n // Build file mapping\n const isKeyTrackedMerge =\n (flowResult.merge === 'deep' || flowResult.merge === 'shallow') &&\n Array.isArray(flowResult.keys);\n \n const fileMapping: (string | WorkspaceIndexFileMapping)[] = [];\n \n if (isKeyTrackedMerge) {\n fileMapping.push({\n target: normalizedTargetRel,\n merge: flowResult.merge,\n keys: flowResult.keys,\n hash: flowResult.contentHash,\n sourceHash: flowResult.sourceContentHash,\n });\n } else if (flowResult.contentHash) {\n fileMapping.push({\n target: normalizedTargetRel,\n hash: flowResult.contentHash,\n sourceHash: flowResult.sourceContentHash,\n });\n } else {\n fileMapping.push(normalizedTargetRel);\n }\n \n // Extract conflicts\n const conflicts = flowResult.conflicts?.map(conflict => ({\n path: conflict.path,\n winner: conflict.winner,\n losers: conflict.losers\n })) || [];\n \n // Collect non-skip warnings (e.g., schema validation warnings)\n const validationWarnings = flowResult.warnings?.filter(\n w => w !== 'Flow skipped due to condition' && w !== 'No files matched pattern'\n );\n\n return {\n success: true,\n processed: true,\n written: true,\n targetPath: target,\n fileMapping,\n mappingKey: sourceRel,\n conflicts,\n ...(validationWarnings && validationWarnings.length > 0 ? { warnings: validationWarnings } : {}),\n };\n}\n\n/**\n * Resolve target path from glob patterns\n * Strips platform suffixes from filenames (e.g. 
read-specs.claude.md -> read-specs.md)\n * \n * @param sourceAbsPath - Absolute source path\n * @param fromPattern - Source pattern from flow\n * @param toPattern - Target pattern from flow\n * @param context - Flow context\n * @returns Resolved absolute target path\n */\nexport function resolveTargetFromGlob(\n sourceAbsPath: string,\n fromPattern: string,\n toPattern: string,\n context: FlowContext\n): string {\n const sourceRelFromPackage = relative(context.packageRoot, sourceAbsPath);\n \n // If 'to' pattern has glob, map the structure\n if (toPattern.includes('*')) {\n // Handle ** recursive patterns\n if (toPattern.includes('**')) {\n const targetRel = resolveRecursiveGlobTargetRelativePath(\n sourceRelFromPackage,\n fromPattern,\n toPattern\n );\n return join(context.workspaceRoot, targetRel);\n }\n \n // Single-level * patterns\n const sourceExt = extname(sourceAbsPath);\n const sourceBase = basename(sourceAbsPath, sourceExt);\n \n const toParts = toPattern.split('*');\n const toPrefix = toParts[0];\n const toSuffix = toParts[1] || '';\n \n const targetExt = toSuffix.startsWith('.') ? 
toSuffix : (sourceExt + toSuffix);\n const targetFileName = sourceBase + targetExt;\n \n // Strip platform suffix from the final target filename\n const strippedTargetFileName = stripPlatformSuffixFromFilename(targetFileName);\n \n return join(context.workspaceRoot, toPrefix + strippedTargetFileName);\n }\n \n // No glob in target - use as-is\n return join(context.workspaceRoot, toPattern);\n}\n\n/**\n * Build flow context with standard variables\n * \n * @param baseContext - Base context properties\n * @param platformDef - Platform definition (for rootFile, rootDir)\n * @returns Complete flow context\n */\nexport function buildFlowContext(\n baseContext: {\n workspaceRoot: string;\n packageRoot: string;\n platform: Platform;\n packageName: string;\n packageVersion: string;\n priority: number;\n dryRun: boolean;\n direction: 'install' | 'save';\n },\n platformDef: {\n rootFile?: string;\n rootDir?: string;\n }\n): FlowContext {\n const resourceLeaf = deriveResourceLeafFromPackageName(baseContext.packageName);\n\n return {\n workspaceRoot: baseContext.workspaceRoot,\n packageRoot: baseContext.packageRoot,\n platform: baseContext.platform,\n packageName: baseContext.packageName,\n direction: baseContext.direction,\n variables: {\n name: baseContext.packageName,\n version: baseContext.packageVersion,\n priority: baseContext.priority,\n rootFile: platformDef.rootFile,\n rootDir: platformDef.rootDir,\n resourceLeaf\n },\n dryRun: baseContext.dryRun\n };\n}\n\n/**\n * Aggregate results from multiple executions\n * \n * @param results - Array of execution results\n * @returns Combined result\n */\nexport function aggregateExecutionResults(results: ExecutionResult[]): ExecutionResult {\n const aggregated: ExecutionResult = {\n success: true,\n filesProcessed: 0,\n filesWritten: 0,\n targetPaths: [],\n fileMapping: {},\n conflicts: [],\n errors: [],\n warnings: []\n };\n \n for (const result of results) {\n aggregated.filesProcessed += result.filesProcessed;\n 
aggregated.filesWritten += result.filesWritten;\n aggregated.targetPaths.push(...result.targetPaths);\n aggregated.conflicts.push(...result.conflicts);\n aggregated.errors.push(...result.errors);\n \n // Merge file mappings\n for (const [source, targets] of Object.entries(result.fileMapping)) {\n aggregated.fileMapping[source] = deduplicateTargets(\n aggregated.fileMapping[source] ?? [], targets\n );\n }\n \n if (!result.success) {\n aggregated.success = false;\n }\n }\n \n return aggregated;\n}\n", "/**\n * Flow Executor\n * \n * Executes flows through a multi-stage pipeline:\n * 1. Load source file and parse format\n * 2. Extract JSONPath (if specified)\n * 3. Pick/omit keys\n * 4. Map keys (with transforms)\n * 5. Apply pipe transforms\n * 6. Embed in target structure\n * 7. Merge with existing target (priority-based)\n * 8. Write to target file\n */\n\nimport { promises as fs } from 'fs';\nimport fsSync from 'fs';\nimport path from 'path';\nimport yaml from 'js-yaml';\nimport * as TOML from 'smol-toml';\nimport { parse as parseJsonc } from 'jsonc-parser';\nimport { JSONPath } from 'jsonpath-plus';\nimport { minimatch } from 'minimatch';\nimport * as fsUtils from '../../utils/fs.js';\nimport { calculateFileHash } from '../../utils/hash-utils.js';\nimport { mergePackageContentIntoRootFile } from '../../utils/root-file-merger.js';\nimport { resolveRecursiveGlobTargetRelativePath } from '../glob-target-mapping.js';\nimport type {\n Flow,\n FlowContext,\n FlowResult,\n FlowExecutor,\n FlowConflict,\n ValidationResult,\n ValidationError,\n ParsedContent,\n FileFormat,\n MultiTargetFlows,\n SwitchExpression,\n} from '../../types/flows.js';\nimport { logger } from '../../utils/logger.js';\nimport { \n defaultTransformRegistry, \n TransformRegistry,\n} from './flow-transforms.js';\nimport { mergeInlinePlatformOverride } from '../platform-yaml-merge.js';\nimport { \n getNestedValue,\n setNestedValue,\n deleteNestedValue\n} from './flow-key-mapper.js';\nimport { 
extractAllKeys } from './flow-key-extractor.js';\nimport { applyMapPipeline, createMapContext, validateMapPipeline, splitMapPipeline } from './map-pipeline/index.js';\nimport { SourcePatternResolver } from './source-resolver.js';\nimport { resolveSwitchExpression, validateSwitchExpression, isSwitchExpression } from './switch-resolver.js';\nimport { smartEquals, smartNotEquals } from '../../utils/path-comparison.js';\nimport { stripPlatformSuffixFromFilename } from './platform-suffix-handler.js';\nimport { parseMarkdownDocument, serializeMarkdownDocument } from './markdown.js';\nimport { STRUCTURED_FORMAT_EXTENSIONS, MARKDOWN_EXTENSIONS } from '../../constants/index.js';\nimport { schemaRegistry } from '../install/schema-registry.js';\nimport { validateFrontmatterAgainstSchema } from './source-schema-validator.js';\n\n/**\n * Determine if a flow is a pass-through (source bytes == target bytes).\n *\n * Returns true when the executor will perform a simple byte copy with no\n * parsing, transformation, or serialization. 
This is used by:\n * - The flow executor's internal dispatch (needsParsing \u2192 executePassThroughCopy)\n * - The install strategy to annotate TargetEntries for the conflict resolver\n *\n * Keep this in sync with the executor's pipeline \u2014 any new flow property that\n * transforms content must be handled here.\n */\nexport function isPassThroughFlow(\n flow: Flow,\n sourcePath: string,\n targetPath: string,\n context: FlowContext\n): boolean {\n // Content manipulation operations\n if (flow.map || flow.pick || flow.omit || flow.path || flow.embed) {\n return false;\n }\n\n // Merge operations that need parsing (deep, shallow require parse+merge)\n if (flow.merge) {\n if (flow.merge === 'deep' || flow.merge === 'shallow') {\n return false;\n }\n // 'composite' and 'replace' don't need parsing\n }\n\n // Format conversion \u2014 structured source to different extension requires parse+serialize\n const sourceExt = path.extname(sourcePath).toLowerCase();\n const targetExt = path.extname(targetPath).toLowerCase();\n if (sourceExt !== targetExt && STRUCTURED_FORMAT_EXTENSIONS.has(sourceExt)) {\n return false;\n }\n\n // Markdown with platform-specific frontmatter overrides during install\n if (MARKDOWN_EXTENSIONS.has(sourceExt) && context.platform && context.direction === 'install') {\n return false;\n }\n\n return true;\n}\n\n/**\n * Default flow executor implementation\n */\nexport class DefaultFlowExecutor implements FlowExecutor {\n private transformRegistry: TransformRegistry;\n private sourceResolver: SourcePatternResolver;\n\n constructor(transformRegistry?: TransformRegistry) {\n this.transformRegistry = transformRegistry || defaultTransformRegistry;\n this.sourceResolver = new SourcePatternResolver();\n }\n\n /**\n * Execute a single flow (now supports glob patterns and switch expressions)\n */\n async executeFlow(flow: Flow, context: FlowContext): Promise<FlowResult> {\n const startTime = Date.now();\n try {\n // Resolve switch expressions in 'from' 
field\n let resolvedFrom = flow.from;\n if (isSwitchExpression(flow.from)) {\n try {\n resolvedFrom = resolveSwitchExpression(flow.from as any as SwitchExpression, context);\n } catch (error) {\n return {\n source: '<switch expression>',\n target: this.normalizeToPattern(flow.to),\n success: false,\n transformed: false,\n error: error instanceof Error ? error : new Error(String(error)),\n executionTime: Date.now() - startTime,\n };\n }\n }\n\n // Resolve switch expressions in 'to' field\n let resolvedTo = flow.to;\n if (isSwitchExpression(flow.to)) {\n try {\n resolvedTo = resolveSwitchExpression(flow.to as SwitchExpression, context);\n } catch (error) {\n return {\n source: this.normalizeFromPattern(resolvedFrom),\n target: '<switch expression>',\n success: false,\n transformed: false,\n error: error instanceof Error ? error : new Error(String(error)),\n executionTime: Date.now() - startTime,\n };\n }\n }\n\n // Check if this is a multi-target flow\n if (typeof resolvedTo !== 'string') {\n const results = await this.executeMultiTarget({ ...flow, from: resolvedFrom, to: resolvedTo }, context);\n // Aggregate results\n return this.aggregateResults(results, startTime);\n }\n\n // Validate flow (with resolved 'from' and 'to')\n const validation = this.validateFlow({ ...flow, from: resolvedFrom, to: resolvedTo });\n if (!validation.valid) {\n return {\n source: this.normalizeFromPattern(resolvedFrom),\n target: resolvedTo as string,\n success: false,\n transformed: false,\n error: new Error(`Invalid flow: ${validation.errors.map(e => e.message).join(', ')}`),\n executionTime: Date.now() - startTime,\n };\n }\n\n // Evaluate conditions\n if (flow.when && !this.evaluateCondition(flow.when, context)) {\n const normalized = this.normalizeFromPattern(resolvedFrom);\n logger.debug(`Flow skipped due to condition: ${normalized} -> ${resolvedTo}`);\n return {\n source: normalized,\n target: resolvedTo as string,\n success: true,\n transformed: false,\n warnings: ['Flow skipped 
due to condition'],\n executionTime: Date.now() - startTime,\n };\n }\n\n // Resolve source paths (may return multiple files for glob patterns)\n const resolution = await this.resolveSourcePattern(resolvedFrom, context);\n const sourcePaths = resolution.paths;\n const resolutionWarnings = resolution.warnings;\n\n // If no files matched, return success with no files processed\n if (sourcePaths.length === 0) {\n return {\n source: this.normalizeFromPattern(resolvedFrom),\n target: resolvedTo as string,\n success: true,\n transformed: false,\n warnings: resolutionWarnings.length > 0 ? resolutionWarnings : ['No files matched pattern'],\n executionTime: Date.now() - startTime,\n };\n }\n\n // Execute pipeline for each matched file\n const results: FlowResult[] = [];\n const firstFromPattern = this.getFirstPattern(resolvedFrom);\n \n for (const sourcePath of sourcePaths) {\n const targetPath = this.resolveTargetFromGlob(sourcePath, firstFromPattern, resolvedTo as string, context);\n const result = await this.executePipeline(flow, sourcePath, targetPath, context);\n results.push({\n ...result,\n executionTime: Date.now() - startTime,\n });\n }\n \n // Add resolution warnings to first result if any\n if (resolutionWarnings.length > 0 && results.length > 0) {\n results[0].warnings = [\n ...(results[0].warnings || []),\n ...resolutionWarnings,\n ];\n }\n\n // If single file, return single result\n if (results.length === 1) {\n return results[0];\n }\n\n // Aggregate multiple results\n return this.aggregateResults(results, startTime);\n } catch (error) {\n return {\n source: isSwitchExpression(flow.from) ? '<switch>' : this.normalizeFromPattern(flow.from),\n target: this.normalizeToPattern(flow.to),\n success: false,\n transformed: false,\n error: error instanceof Error ? 
error : new Error(String(error)),\n executionTime: Date.now() - startTime,\n };\n }\n }\n\n /**\n * Execute multiple flows\n */\n async executeFlows(flows: Flow[], context: FlowContext): Promise<FlowResult[]> {\n const results: FlowResult[] = [];\n\n for (const flow of flows) {\n const result = await this.executeFlow(flow, context);\n results.push(result);\n }\n\n return results;\n }\n\n /**\n * Execute a multi-target flow\n */\n async executeMultiTarget(flow: Flow, context: FlowContext): Promise<FlowResult[]> {\n if (typeof flow.to === 'string') {\n throw new Error('Flow is not a multi-target flow');\n }\n\n const multiTarget = flow.to as MultiTargetFlows;\n const normalizedFrom = this.normalizeFromPattern(flow.from);\n \n // Resolve source paths (may be multiple files with glob)\n const resolution = await this.resolveSourcePattern(flow.from, context);\n const sourcePaths = resolution.paths;\n\n // If no files matched\n if (sourcePaths.length === 0) {\n return Object.keys(multiTarget).map(target => ({\n source: normalizedFrom,\n target,\n success: true,\n transformed: false,\n warnings: resolution.warnings.length > 0 ? 
resolution.warnings : ['No files matched pattern'],\n }));\n }\n\n // Execute each source file\n const allResults: FlowResult[] = [];\n const firstFromPattern = this.getFirstPattern(flow.from);\n\n for (const sourcePath of sourcePaths) {\n // Load and parse source once\n const sourceContent = await this.loadSourceFile(sourcePath, context);\n\n // Execute each target\n for (const [targetPath, targetFlow] of Object.entries(multiTarget)) {\n const startTime = Date.now();\n\n try {\n // Merge target flow with base flow\n const mergedFlow: Flow = {\n ...flow,\n ...targetFlow,\n from: flow.from,\n to: targetPath,\n };\n\n // Evaluate conditions\n if (mergedFlow.when && !this.evaluateCondition(mergedFlow.when, context)) {\n logger.debug(`Multi-target flow skipped due to condition: ${normalizedFrom} -> ${targetPath}`);\n allResults.push({\n source: normalizedFrom,\n target: targetPath,\n success: true,\n transformed: false,\n warnings: ['Flow skipped due to condition'],\n executionTime: Date.now() - startTime,\n });\n continue;\n }\n\n const resolvedTargetPath = this.resolveTargetFromGlob(sourcePath, firstFromPattern, targetPath, context);\n\n // Execute pipeline with pre-loaded content\n const result = await this.executePipelineWithContent(\n mergedFlow,\n sourceContent,\n sourcePath,\n resolvedTargetPath,\n context\n );\n\n allResults.push({\n ...result,\n executionTime: Date.now() - startTime,\n });\n } catch (error) {\n allResults.push({\n source: normalizedFrom,\n target: targetPath,\n success: false,\n transformed: false,\n error: error instanceof Error ? 
error : new Error(String(error)),\n executionTime: Date.now() - startTime,\n });\n }\n }\n }\n\n return allResults;\n }\n\n /**\n * Validate a flow configuration\n */\n validateFlow(flow: Flow): ValidationResult {\n const errors: ValidationError[] = [];\n\n // Check required fields\n if (!flow.from) {\n errors.push({ message: 'Flow missing required field \"from\"', code: 'MISSING_FROM' });\n }\n\n if (!flow.to) {\n errors.push({ message: 'Flow missing required field \"to\"', code: 'MISSING_TO' });\n }\n\n // Validate switch expression in 'to' field\n if (flow.to && isSwitchExpression(flow.to)) {\n const switchValidation = validateSwitchExpression(flow.to as SwitchExpression);\n if (!switchValidation.valid) {\n for (const error of switchValidation.errors) {\n errors.push({\n message: error,\n code: 'INVALID_SWITCH_EXPRESSION',\n });\n }\n }\n }\n\n // Validate pick/omit\n if (flow.pick && flow.omit) {\n errors.push({ message: 'Flow cannot have both \"pick\" and \"omit\"', code: 'CONFLICTING_FILTERS' });\n }\n\n // Validate merge strategy\n if (flow.merge && !['deep', 'shallow', 'replace', 'composite'].includes(flow.merge)) {\n errors.push({\n message: `Invalid merge strategy: ${flow.merge}. 
Must be one of: deep, shallow, replace, composite`,\n code: 'INVALID_MERGE',\n });\n }\n\n // Validate JSONPath expression\n if (flow.path) {\n try {\n // Try to validate JSONPath syntax\n JSONPath({ path: flow.path, json: {} });\n } catch (error) {\n errors.push({\n message: `Invalid JSONPath expression: ${flow.path}`,\n code: 'INVALID_JSONPATH',\n });\n }\n }\n\n // Validate map pipeline\n if (flow.map) {\n const mapPipelineValidation = validateMapPipeline(flow.map);\n if (!mapPipelineValidation.valid) {\n for (const error of mapPipelineValidation.errors) {\n errors.push({\n message: error,\n code: 'INVALID_MAP_PIPELINE',\n });\n }\n }\n }\n\n return {\n valid: errors.length === 0,\n errors,\n warnings: [],\n };\n }\n\n /**\n * Execute the transformation pipeline\n */\n private async executePipeline(\n flow: Flow,\n sourcePath: string,\n targetPath: string,\n context: FlowContext\n ): Promise<Omit<FlowResult, 'executionTime'>> {\n // Check if we can skip parsing for pass-through flows\n if (!this.needsParsing(flow, sourcePath, targetPath, context)) {\n return await this.executePassThroughCopy(flow, sourcePath, targetPath, context);\n }\n\n // Hash raw source BEFORE any transforms (platform merge, etc.)\n let sourceContentHash: string | undefined;\n if (!context.dryRun) {\n const rawSourceContent = await fsUtils.readTextFile(sourcePath);\n sourceContentHash = await calculateFileHash(rawSourceContent);\n }\n\n // Step 1: Load source file with parsing\n const sourceContent = await this.loadSourceFile(sourcePath, context);\n\n const result = await this.executePipelineWithContent(flow, sourceContent, sourcePath, targetPath, context);\n return { ...result, sourceContentHash };\n }\n\n /**\n * Execute pipeline with pre-loaded content (for multi-target flows)\n */\n private async executePipelineWithContent(\n flow: Flow,\n sourceContent: ParsedContent,\n sourcePath: string,\n targetPath: string,\n context: FlowContext\n ): Promise<Omit<FlowResult, 'executionTime'>> {\n 
const warnings: string[] = [];\n const conflicts: FlowConflict[] = [];\n\n let data = sourceContent.data;\n let transformed = false;\n\n // Validate source frontmatter against declared schema (if present)\n if (flow.sourceSchema) {\n const schema = schemaRegistry.loadSchema(flow.sourceSchema);\n if (schema) {\n const dataToValidate = (data && typeof data === 'object' && 'frontmatter' in data)\n ? data.frontmatter\n : (typeof data === 'object' && data !== null ? data : null);\n\n if (dataToValidate) {\n const schemaWarnings = validateFrontmatterAgainstSchema(\n dataToValidate,\n schema,\n path.relative(context.packageRoot, sourcePath),\n );\n warnings.push(...schemaWarnings);\n }\n }\n }\n\n try {\n // Step 2: Extract JSONPath (if specified)\n if (flow.path) {\n data = this.extractJSONPath(data, flow.path);\n transformed = true;\n }\n\n // Step 3: Pick/omit keys\n if (flow.pick) {\n data = this.pickKeys(data, flow.pick);\n transformed = true;\n } else if (flow.omit) {\n data = this.omitKeys(data, flow.omit);\n transformed = true;\n }\n\n // Step 4: Apply map pipeline\n // Split into schema operations and pipe operations\n // Schema ops are applied BEFORE merge, pipe ops are applied AFTER merge\n let contributedKeys: string[] | undefined;\n let schemaOps: any[] = [];\n let pipeOps: any[] = [];\n \n if (flow.map) {\n const split = splitMapPipeline(flow.map);\n schemaOps = split.schemaOps;\n pipeOps = split.pipeOps;\n \n // Apply schema operations first (before merge)\n if (schemaOps.length > 0) {\n const mapContext = createMapContext({\n filename: path.basename(sourcePath, path.extname(sourcePath)),\n dirname: path.basename(path.dirname(sourcePath)),\n path: path.relative(context.packageRoot, sourcePath),\n ext: path.extname(sourcePath),\n });\n\n // Auto-split frontmatter only for .md \u2192 .md flows. 
For .md \u2192 structured\n // (toml/json/yaml/etc.), the user's pipeline must reshape both halves\n // into a single flat object, so pass the whole document.\n const targetFormat = this.detectFormat(targetPath, '');\n const isMarkdownTarget = targetFormat === 'markdown' || targetFormat === 'md';\n if (data && typeof data === 'object' && 'frontmatter' in data && isMarkdownTarget) {\n data.frontmatter = applyMapPipeline(\n data.frontmatter,\n schemaOps,\n mapContext,\n this.transformRegistry\n );\n } else {\n data = applyMapPipeline(\n data,\n schemaOps,\n mapContext,\n this.transformRegistry\n );\n }\n transformed = true;\n }\n }\n \n // Track keys AFTER schema transforms but BEFORE merge and pipe transforms\n // This represents the structured data this package contributes\n const shouldTrackKeys =\n Boolean(flow.merge) &&\n flow.merge !== 'replace' &&\n flow.merge !== 'composite';\n \n if (shouldTrackKeys && typeof data === 'object' && data !== null) {\n // Extract from frontmatter if it's a markdown file\n const dataToExtract = (data && 'frontmatter' in data) ? 
data.frontmatter : data;\n if (typeof dataToExtract === 'object' && dataToExtract !== null) {\n contributedKeys = extractAllKeys(dataToExtract);\n }\n }\n\n const targetExists = await fsUtils.exists(targetPath);\n\n // Step 6: Embed in target structure\n if (flow.embed) {\n data = this.embedContent(data, flow.embed);\n transformed = true;\n }\n\n // Step 7: Merge with existing target (if needed)\n if (targetExists) {\n const targetContent = await this.loadSourceFile(targetPath, context);\n \n // Special handling for composite merge - works with raw text\n if (flow.merge === 'composite') {\n // Use raw content for composite merge\n const sourceRaw = sourceContent.raw;\n const targetRaw = targetContent.raw;\n data = mergePackageContentIntoRootFile(targetRaw, context.packageName, sourceRaw);\n transformed = true;\n } else {\n // Normal merge for other strategies\n const mergeResult = this.mergeContent(\n data,\n targetContent.data,\n flow.merge || 'replace',\n context\n );\n data = mergeResult.data;\n conflicts.push(...mergeResult.conflicts);\n if (mergeResult.conflicts.length > 0) {\n transformed = true;\n }\n }\n }\n\n // Step 7.5: Apply pipe operations AFTER merge (format conversions)\n // These operations may convert the data to a string format (e.g., json-to-toml)\n if (pipeOps.length > 0) {\n const mapContext = createMapContext({\n filename: path.basename(sourcePath, path.extname(sourcePath)),\n dirname: path.basename(path.dirname(sourcePath)),\n path: path.relative(context.packageRoot, sourcePath),\n ext: path.extname(sourcePath),\n });\n\n // Auto-split frontmatter only for .md \u2192 .md flows (see Step 4 comment).\n const targetFormat = this.detectFormat(targetPath, '');\n const isMarkdownTarget = targetFormat === 'markdown' || targetFormat === 'md';\n if (data && typeof data === 'object' && 'frontmatter' in data && isMarkdownTarget) {\n data.frontmatter = applyMapPipeline(\n data.frontmatter,\n pipeOps,\n mapContext,\n this.transformRegistry\n );\n } else 
{\n data = applyMapPipeline(\n data,\n pipeOps,\n mapContext,\n this.transformRegistry\n );\n }\n transformed = true;\n }\n\n // Step 8: Write to target file\n let contentHash: string | undefined;\n if (!context.dryRun) {\n contentHash = await this.writeTargetFile(targetPath, data, sourceContent.format);\n }\n\n return {\n source: sourcePath,\n target: targetPath,\n success: true,\n transformed,\n keys: contributedKeys,\n merge: flow.merge,\n contentHash,\n warnings: warnings.length > 0 ? warnings : undefined,\n conflicts: conflicts.length > 0 ? conflicts : undefined,\n pipeline: this.getPipeline(flow),\n };\n } catch (error) {\n return {\n source: sourcePath,\n target: targetPath,\n success: false,\n transformed,\n error: error instanceof Error ? error : new Error(String(error)),\n warnings: warnings.length > 0 ? warnings : undefined,\n };\n }\n }\n\n /**\n * Load and parse source file\n */\n async loadSourceFile(filePath: string, context?: FlowContext): Promise<ParsedContent> {\n let raw = await fsUtils.readTextFile(filePath);\n const format = this.detectFormat(filePath, raw);\n \n // Apply platform-specific frontmatter overrides for markdown files during install\n if ((format === 'markdown' || format === 'md') && context?.platform && context?.direction === 'install') {\n raw = mergeInlinePlatformOverride(raw, context.platform, context.workspaceRoot);\n }\n \n const data = this.parseSourceContent(raw, format);\n\n return { data, format, raw };\n }\n\n /**\n * Write transformed content to target file\n */\n async writeTargetFile(filePath: string, content: any, sourceFormat: FileFormat): Promise<string> {\n // Detect target format from file extension\n const targetFormat = this.detectFormat(filePath, '');\n const serialized = this.serializeTargetContent(content, targetFormat);\n const hash = await calculateFileHash(serialized);\n\n // Skip write if target already has identical content (e.g., claimed unowned file)\n if (await fsUtils.exists(filePath)) {\n try {\n 
const existing = await fsUtils.readTextFile(filePath, 'utf8');\n if (existing === serialized) {\n return hash;\n }\n } catch {\n // Read failed \u2014 proceed with write\n }\n }\n\n await fsUtils.ensureDir(path.dirname(filePath));\n await fsUtils.writeTextFile(filePath, serialized);\n return hash;\n }\n\n /**\n * Determine if parsing is needed for this flow.\n * Delegates to the standalone `isPassThroughFlow()` function.\n */\n private needsParsing(flow: Flow, sourcePath: string, targetPath: string, context: FlowContext): boolean {\n return !isPassThroughFlow(flow, sourcePath, targetPath, context);\n }\n\n /**\n * Execute a pass-through copy without parsing\n * Used for simple file copies with no transformations\n */\n private async executePassThroughCopy(\n flow: Flow,\n sourcePath: string,\n targetPath: string,\n context: FlowContext\n ): Promise<Omit<FlowResult, 'executionTime'>> {\n const warnings: string[] = [];\n\n try {\n // Check if target exists and handle merge strategy\n const targetExists = await fsUtils.exists(targetPath);\n \n if (targetExists && flow.merge && flow.merge !== 'replace') {\n // If merge strategy other than replace, we need to parse after all\n // This is a safety check - needsParsing should have caught this\n logger.warn(`Pass-through copy attempted with merge strategy '${flow.merge}' - falling back to parsed pipeline`);\n const sourceContent = await this.loadSourceFile(sourcePath, context);\n return this.executePipelineWithContent(flow, sourceContent, sourcePath, targetPath, context);\n }\n\n // Simple byte copy\n let contentHash: string | undefined;\n if (!context.dryRun) {\n const content = await fs.readFile(sourcePath);\n contentHash = await calculateFileHash(content.toString('utf8'));\n\n // Skip write if target already has identical content\n if (await fsUtils.exists(targetPath)) {\n try {\n const existing = await fs.readFile(targetPath);\n if (Buffer.compare(content, existing) === 0) {\n // Content identical \u2014 return success 
without writing\n return {\n source: sourcePath,\n target: targetPath,\n success: true,\n transformed: false,\n contentHash,\n sourceContentHash: contentHash,\n warnings: warnings.length > 0 ? warnings : undefined,\n };\n }\n } catch {\n // Read failed \u2014 proceed with write\n }\n }\n\n await fsUtils.ensureDir(path.dirname(targetPath));\n await fs.writeFile(targetPath, content);\n\n // Preserve executable permission from source file\n try {\n const sourceStat = await fs.stat(sourcePath);\n if (sourceStat.mode & 0o111) {\n await fs.chmod(targetPath, sourceStat.mode);\n }\n } catch {\n // Ignore permission preservation failures\n }\n }\n\n return {\n source: sourcePath,\n target: targetPath,\n success: true,\n transformed: false,\n contentHash,\n sourceContentHash: contentHash,\n warnings: warnings.length > 0 ? warnings : undefined,\n };\n } catch (error) {\n return {\n source: sourcePath,\n target: targetPath,\n success: false,\n transformed: false,\n error: error instanceof Error ? error : new Error(String(error)),\n warnings: warnings.length > 0 ? 
warnings : undefined,\n };\n }\n }\n\n /**\n * Parse source content based on format\n */\n parseSourceContent(content: string, format: FileFormat): any {\n // Handle empty content gracefully for structured formats\n if (!content.trim()) {\n switch (format) {\n case 'json':\n case 'jsonc':\n case 'yaml':\n case 'yml':\n case 'toml':\n return {};\n case 'markdown':\n case 'md':\n return { body: '' };\n case 'text':\n case 'txt':\n default:\n return content;\n }\n }\n\n try {\n switch (format) {\n case 'json':\n case 'jsonc':\n // Remove comments for JSONC\n const cleaned = content;\n if (format === 'jsonc') {\n const parsed = parseJsonc(cleaned);\n if (parsed === undefined) {\n throw new Error('jsonc-parser returned undefined');\n }\n return parsed;\n }\n return JSON.parse(cleaned);\n\n case 'yaml':\n case 'yml':\n return yaml.load(content);\n\n case 'toml':\n try {\n return TOML.parse(content);\n } catch (error) {\n throw new Error(`TOML parse error: ${error instanceof Error ? error.message : String(error)}`);\n }\n\n case 'markdown':\n case 'md':\n return this.parseMarkdown(content);\n\n case 'text':\n case 'txt':\n default:\n return content;\n }\n } catch (error) {\n throw new Error(`Failed to parse ${format} content: ${error instanceof Error ? error.message : String(error)}`);\n }\n }\n\n /**\n * Serialize content to target format\n */\n serializeTargetContent(content: any, format: FileFormat): string {\n try {\n switch (format) {\n case 'json':\n case 'jsonc':\n return JSON.stringify(content, null, 2);\n\n case 'yaml':\n case 'yml':\n return yaml.dump(content, { indent: 2, flowLevel: -1, lineWidth: -1 });\n\n case 'toml':\n // If a pipeline already produced TOML text (e.g. 
via domain transforms),\n // don't stringify again.\n if (typeof content === 'string') {\n return content;\n }\n try {\n // Serialize to TOML\n let toml = TOML.stringify(content);\n \n // Apply inline table formatting for Codex MCP configs\n if (content && typeof content === 'object' && content.mcp_servers) {\n toml = this.applyCodexTomlFormatting(toml);\n }\n \n return toml;\n } catch (error) {\n throw new Error(`TOML stringify error: ${error instanceof Error ? error.message : String(error)}`);\n }\n\n case 'markdown':\n case 'md':\n return this.serializeMarkdown(content);\n\n case 'text':\n case 'txt':\n default:\n return typeof content === 'string' ? content : JSON.stringify(content, null, 2);\n }\n } catch (error) {\n throw new Error(`Failed to serialize ${format} content: ${error instanceof Error ? error.message : String(error)}`);\n }\n }\n\n /**\n * Detect format from file extension or content\n */\n private detectFormat(filePath: string, content: string): FileFormat {\n const ext = path.extname(filePath).toLowerCase();\n\n const extMap: Record<string, FileFormat> = {\n '.json': 'json',\n '.jsonc': 'jsonc',\n '.yaml': 'yaml',\n '.yml': 'yml',\n '.toml': 'toml',\n '.md': 'markdown',\n '.mdc': 'markdown',\n '.markdown': 'markdown',\n '.txt': 'text',\n };\n\n if (extMap[ext]) {\n return extMap[ext];\n }\n\n // Try to detect from content\n if (content.trim().startsWith('{') || content.trim().startsWith('[')) {\n return content.includes('//') || content.includes('/*') ? 'jsonc' : 'json';\n }\n\n if (content.includes('---\\n') || content.includes('\\n---\\n')) {\n return 'markdown';\n }\n\n return 'text';\n }\n\n /**\n * Extract data using JSONPath\n */\n private extractJSONPath(data: any, jsonPath: string): any {\n try {\n const result = JSONPath({ path: jsonPath, json: data });\n return result.length === 1 ? result[0] : result;\n } catch (error) {\n throw new Error(`JSONPath extraction failed: ${error instanceof Error ? 
error.message : String(error)}`);\n }\n }\n\n /**\n * Pick specified keys\n */\n private pickKeys(data: any, keys: string[]): any {\n if (typeof data !== 'object' || data === null) {\n return data;\n }\n\n const result: any = Array.isArray(data) ? [] : {};\n\n for (const key of keys) {\n if (key.includes('.')) {\n // Handle nested keys\n this.setNestedValue(result, key, this.getNestedValue(data, key));\n } else if (key in data) {\n result[key] = data[key];\n }\n }\n\n return result;\n }\n\n /**\n * Omit specified keys\n */\n private omitKeys(data: any, keys: string[]): any {\n if (typeof data !== 'object' || data === null) {\n return data;\n }\n\n const result = Array.isArray(data) ? [...data] : { ...data };\n\n for (const key of keys) {\n if (key.includes('.')) {\n // Handle nested keys\n this.deleteNestedValue(result, key);\n } else {\n delete result[key];\n }\n }\n\n return result;\n }\n\n /**\n * Apply pipe transforms\n */\n private async applyPipeTransforms(data: any, transforms: string[], context: FlowContext): Promise<any> {\n let result = data;\n\n for (const transformSpec of transforms) {\n try {\n // Parse transform specification\n // Format: \"transform-name\" or \"transform-name(option1=value1,option2=value2)\"\n const { name, options } = this.parseTransformSpec(transformSpec);\n\n logger.debug(`Applying transform: ${name}`, options);\n\n // Execute transform\n result = this.transformRegistry.execute(name, result, options);\n } catch (error) {\n throw new Error(`Transform '${transformSpec}' failed: ${error instanceof Error ? 
error.message : String(error)}`);\n }\n }\n\n return result;\n }\n\n /**\n * Parse transform specification\n * Examples: \"trim\", \"number\", \"pick-keys(keys=[a,b,c])\"\n */\n private parseTransformSpec(spec: string): { name: string; options?: any } {\n const match = spec.match(/^([a-z-]+)(?:\\((.+)\\))?$/);\n if (!match) {\n throw new Error(`Invalid transform specification: ${spec}`);\n }\n\n const [, name, optionsStr] = match;\n\n if (!optionsStr) {\n return { name };\n }\n\n // Parse options (simple key=value format)\n const options: any = {};\n const pairs = optionsStr.split(',').map(s => s.trim());\n \n for (const pair of pairs) {\n const [key, value] = pair.split('=').map(s => s.trim());\n \n // Parse value type\n if (value.startsWith('[') && value.endsWith(']')) {\n // Array\n options[key] = value.slice(1, -1).split(',').map(s => s.trim());\n } else if (value === 'true' || value === 'false') {\n // Boolean\n options[key] = value === 'true';\n } else if (!isNaN(Number(value))) {\n // Number\n options[key] = Number(value);\n } else {\n // String\n options[key] = value;\n }\n }\n\n return { name, options };\n }\n\n /**\n * Embed content under a key\n */\n private embedContent(data: any, key: string): any {\n return { [key]: data };\n }\n\n /**\n * Merge content with priority-based conflict resolution\n */\n private mergeContent(\n source: any,\n target: any,\n strategy: string,\n context: FlowContext\n ): { data: any; conflicts: FlowConflict[] } {\n const conflicts: FlowConflict[] = [];\n\n let merged: any;\n\n switch (strategy) {\n case 'replace':\n merged = source;\n break;\n\n case 'shallow':\n merged = { ...target, ...source };\n break;\n\n case 'deep':\n merged = this.deepMerge(target, source, conflicts, context);\n break;\n\n case 'composite':\n // Composite merge is handled earlier in the pipeline (Step 7)\n // This case should not be reached\n merged = source;\n break;\n\n default:\n merged = source;\n }\n\n return { data: merged, conflicts };\n }\n\n 
/**\n * Deep merge two objects\n */\n private deepMerge(target: any, source: any, conflicts: FlowConflict[], context: FlowContext, keyPath: string = ''): any {\n if (typeof source !== 'object' || source === null) {\n return source;\n }\n\n if (typeof target !== 'object' || target === null) {\n return source;\n }\n\n if (Array.isArray(source) && Array.isArray(target)) {\n // Merge arrays\n return [...target, ...source];\n }\n\n const result = { ...target };\n\n for (const key of Object.keys(source)) {\n const currentPath = keyPath ? `${keyPath}.${key}` : key;\n\n if (!(key in target)) {\n result[key] = source[key];\n } else if (typeof source[key] === 'object' && typeof target[key] === 'object') {\n result[key] = this.deepMerge(target[key], source[key], conflicts, context, currentPath);\n } else if (source[key] !== target[key]) {\n // Conflict detected\n conflicts.push({\n path: currentPath,\n winner: context.packageName,\n losers: ['existing'],\n type: 'value',\n resolution: 'last-writer-wins',\n });\n result[key] = source[key];\n }\n }\n\n return result;\n }\n\n /**\n * Evaluate condition\n */\n private evaluateCondition(condition: any, context: FlowContext): boolean {\n if (condition.and) {\n return condition.and.every((c: any) => this.evaluateCondition(c, context));\n }\n\n if (condition.or) {\n return condition.or.some((c: any) => this.evaluateCondition(c, context));\n }\n\n if (condition.not) {\n return !this.evaluateCondition(condition.not, context);\n }\n\n if (condition.$eq) {\n const [left, right] = condition.$eq;\n const leftVal = this.resolveValue(left, context);\n const rightVal = this.resolveValue(right, context);\n return smartEquals(leftVal, rightVal);\n }\n\n if (condition.$ne) {\n const [left, right] = condition.$ne;\n const leftVal = this.resolveValue(left, context);\n const rightVal = this.resolveValue(right, context);\n return smartNotEquals(leftVal, rightVal);\n }\n\n if (condition.exists) {\n const testPath = path.join(context.workspaceRoot, 
condition.exists);\n // Use existsSync for synchronous condition evaluation\n return fsSync.existsSync(testPath);\n }\n\n if (condition.platform) {\n return context.platform === condition.platform;\n }\n\n return true;\n }\n\n /**\n * Resolve a value, handling $$variable references\n */\n private resolveValue(value: string, context: FlowContext): any {\n if (value.startsWith('$$')) {\n const varName = value.slice(2);\n return context.variables[varName];\n }\n return value;\n }\n\n\n\n /**\n * Resolve pattern with glob support and priority-based array matching\n * Returns resolved file paths (glob patterns return multiple files)\n */\n private async resolveSourcePattern(\n pattern: string | string[] | SwitchExpression,\n context: FlowContext\n ): Promise<{ paths: string[]; warnings: string[] }> {\n if (isSwitchExpression(pattern)) {\n throw new Error('Cannot resolve SwitchExpression - expression must be resolved first');\n }\n // Type narrowing: pattern is now string | string[]\n const narrowedPattern = pattern as string | string[];\n const result = await this.sourceResolver.resolve(narrowedPattern, {\n baseDir: context.packageRoot,\n logWarnings: true,\n });\n\n return {\n paths: result.paths,\n warnings: result.warnings,\n };\n }\n\n /**\n * Resolve target path from source path and patterns\n * Handles single-level (*) and recursive (**) globs\n * Strips platform suffixes from filenames (e.g. 
read-specs.claude.md -> read-specs.md)\n */\n private resolveTargetFromGlob(sourcePath: string, fromPattern: string, toPattern: string, context: FlowContext): string {\n // Get relative path from package root\n const relativePath = path.relative(context.packageRoot, sourcePath);\n \n // If 'to' pattern has glob, map the structure\n if (toPattern.includes('*')) {\n // Handle ** recursive patterns\n if (toPattern.includes('**')) {\n const targetRel = resolveRecursiveGlobTargetRelativePath(\n relativePath,\n fromPattern,\n toPattern\n );\n return path.join(context.workspaceRoot, targetRel);\n }\n \n // Handle single-level * patterns\n const sourceFileName = path.basename(sourcePath);\n const sourceExt = path.extname(sourcePath);\n const sourceBase = path.basename(sourcePath, sourceExt);\n \n const toParts = toPattern.split('*');\n const toPrefix = toParts[0];\n const toSuffix = toParts[1] || '';\n \n const targetExt = toSuffix.startsWith('.') ? toSuffix : (sourceExt + toSuffix);\n const targetFileName = sourceBase + targetExt;\n \n // Strip platform suffix from the final target filename\n const strippedTargetFileName = stripPlatformSuffixFromFilename(targetFileName);\n \n const resolvedTo = toPrefix + strippedTargetFileName;\n return path.join(context.workspaceRoot, resolvedTo);\n }\n \n // No glob in target - use as-is\n return path.join(context.workspaceRoot, toPattern);\n }\n\n /**\n * Get pipeline description\n */\n private getPipeline(flow: Flow): string[] {\n const pipeline: string[] = ['load'];\n\n if (flow.path) pipeline.push('extract');\n if (flow.pick) pipeline.push('pick');\n if (flow.omit) pipeline.push('omit');\n if (flow.map) pipeline.push('map');\n if (flow.embed) pipeline.push('embed');\n if (flow.merge) pipeline.push(`merge:${flow.merge}`);\n\n pipeline.push('write');\n\n return pipeline;\n }\n\n /**\n * Normalize from pattern for display in results\n * Converts array to comma-separated string\n */\n private normalizeFromPattern(pattern: string | 
string[] | SwitchExpression): string {\n if (isSwitchExpression(pattern)) {\n return '<switch>';\n }\n // Type narrowing: pattern is now string | string[]\n const narrowedPattern = pattern as string | string[];\n return Array.isArray(narrowedPattern) ? narrowedPattern.join(', ') : narrowedPattern;\n }\n\n /**\n * Normalize to pattern for display in results\n * Handles string, multi-target, and switch expressions\n */\n private normalizeToPattern(pattern: string | MultiTargetFlows | SwitchExpression): string {\n if (typeof pattern === 'string') {\n return pattern;\n }\n if (isSwitchExpression(pattern)) {\n return '<switch>';\n }\n return Object.keys(pattern as MultiTargetFlows).join(', ');\n }\n\n /**\n * Get the first pattern from a pattern or array of patterns\n * Used for path resolution when multiple sources are specified\n */\n private getFirstPattern(pattern: string | string[] | SwitchExpression): string {\n if (isSwitchExpression(pattern)) {\n throw new Error('Cannot get first pattern from SwitchExpression - expression must be resolved first');\n }\n // Type narrowing: pattern is now string | string[]\n const narrowedPattern = pattern as string | string[];\n return Array.isArray(narrowedPattern) ? narrowedPattern[0] : narrowedPattern;\n }\n\n /**\n * Aggregate multi-target results\n */\n private aggregateResults(results: FlowResult[], startTime: number): FlowResult {\n const successful = results.filter(r => r.success);\n const failed = results.filter(r => !r.success);\n\n if (failed.length > 0) {\n return {\n source: results[0]?.source || '',\n target: results.map(r => r.target).join(', '),\n success: false,\n transformed: results.some(r => r.transformed),\n error: failed[0]?.error,\n warnings: results.flatMap(r => r.warnings || []),\n executionTime: Date.now() - startTime,\n };\n }\n\n return {\n source: results[0]?.source || '',\n target: results.flatMap(r => typeof r.target === 'string' ? 
[r.target] : r.target),\n success: true,\n transformed: results.some(r => r.transformed),\n warnings: results.flatMap(r => r.warnings || []),\n conflicts: results.flatMap(r => r.conflicts || []),\n executionTime: Date.now() - startTime,\n };\n }\n\n /**\n * Parse markdown with frontmatter\n */\n private parseMarkdown(content: string): any {\n return parseMarkdownDocument(content);\n }\n\n /**\n * Serialize markdown with frontmatter\n */\n private serializeMarkdown(content: any): string {\n return serializeMarkdownDocument(content);\n }\n\n /**\n * Strip JSON comments\n */\n private stripJSONComments(content: string): string {\n // Remove single-line comments\n let result = content.replace(/\\/\\/.*$/gm, '');\n // Remove multi-line comments\n result = result.replace(/\\/\\*[\\s\\S]*?\\*\\//g, '');\n return result;\n }\n\n /**\n * Get nested value using dot notation (delegates to key mapper)\n */\n private getNestedValue(obj: any, path: string): any {\n return getNestedValue(obj, path);\n }\n\n /**\n * Set nested value using dot notation (delegates to key mapper)\n */\n private setNestedValue(obj: any, path: string, value: any): void {\n setNestedValue(obj, path, value);\n }\n\n /**\n * Delete nested value using dot notation (delegates to key mapper)\n */\n private deleteNestedValue(obj: any, path: string): void {\n deleteNestedValue(obj, path);\n }\n\n /**\n * Apply Codex-specific TOML formatting\n * Converts nested table sections to inline format for http_headers and env_http_headers\n */\n private applyCodexTomlFormatting(toml: string): string {\n const inlineKeys = ['http_headers', 'env_http_headers'];\n let result = toml;\n\n for (const key of inlineKeys) {\n // Pattern to match nested table sections for the key\n const pattern = new RegExp(\n `\\\\[([\\\\w-]+(?:\\\\.[\\\\w-]+|\\\\.\"[^\"]+\")*)?\\\\.${key}\\\\]\\\\s*\\\\n([\\\\s\\\\S]*?)(?=\\\\n\\\\[|\\\\n*$)`,\n 'g'\n );\n\n result = result.replace(pattern, (match, parentPath, content) => {\n const pairs: 
string[] = [];\n const lines = content.trim().split('\\n');\n\n for (const line of lines) {\n const trimmed = line.trim();\n if (!trimmed || trimmed.startsWith('#')) continue;\n\n const kvMatch = trimmed.match(/^([\\w-]+)\\s*=\\s*(.+)$/);\n if (kvMatch) {\n const [, k, v] = kvMatch;\n pairs.push(`\"${k}\" = ${v}`);\n }\n }\n\n if (pairs.length === 0) return match;\n\n const inlineTable = `{ ${pairs.join(', ')} }`;\n return `${key} = ${inlineTable}`;\n });\n }\n\n return result;\n }\n}\n\n/**\n * Create a flow executor instance\n */\nexport function createFlowExecutor(): FlowExecutor {\n return new DefaultFlowExecutor();\n}\n", "/**\n * Hash Utilities Module\n * Utility functions for content hashing and comparison\n */\n\nimport { xxhash3 } from 'hash-wasm';\nimport { logger } from './logger.js';\n\n/**\n * Calculate hash of file content using xxhash3\n */\nexport async function calculateFileHash(content: string): Promise<string> {\n try {\n return await xxhash3(content);\n } catch (error) {\n logger.warn(`Failed to calculate hash for content: ${error}`);\n // Return a fallback hash based on content length and first/last chars\n const fallback = `${content.length}-${content.charAt(0)}-${content.charAt(content.length - 1)}`;\n return fallback;\n }\n}\n\n/**\n * Calculate hash of file content synchronously (for simple cases)\n * Note: This is less performant but useful when async operations aren't needed\n */\nexport function calculateFileHashSync(content: string): string {\n try {\n // For sync operations, we'll use a simple hash based on content characteristics\n // In a real implementation, you might use a sync hash library\n const length = content.length;\n const firstChar = content.charCodeAt(0) || 0;\n const lastChar = content.charCodeAt(content.length - 1) || 0;\n\n // Simple hash combining length and character codes\n return `${length}-${firstChar}-${lastChar}`;\n } catch (error) {\n logger.warn(`Failed to calculate sync hash for content: ${error}`);\n return 
`fallback-${content.length}`;\n }\n}\n", "import { FILE_PATTERNS } from '../constants/index.js';\nimport type { Platform } from '../types/platform.js';\nimport {\n deepEqualYaml,\n deepMerge,\n extractInlinePlatformOverrides,\n splitFrontmatter,\n composeMarkdown\n} from './markdown-frontmatter.js';\n\n/**\n * Merge inline platform-specific override with universal content.\n * - Frontmatter contains common keys and platform-keyed override blocks.\n * - Produces merged frontmatter (common + override[targetPlatform]) and\n * removes platform blocks in the emitted markdown.\n */\nexport function mergeInlinePlatformOverride(\n universalContent: string,\n targetPlatform: Platform,\n cwd?: string\n): string {\n try {\n const trimmed = universalContent.trim();\n if (!trimmed.endsWith(FILE_PATTERNS.MD_FILES) && !trimmed.startsWith('---')) {\n // Fast path: only attempt merge for markdown with potential frontmatter\n return universalContent;\n }\n\n const { frontmatter, body, rawFrontmatter } = splitFrontmatter(universalContent);\n const { common, overridesByPlatform } = extractInlinePlatformOverrides(frontmatter, cwd);\n const override = overridesByPlatform.get(targetPlatform) ?? 
{};\n\n const mergedData = deepMerge(common, override);\n\n // Avoid reformatting if there's no change compared to base.\n // If frontmatter was present, check deep equality.\n if (rawFrontmatter !== undefined && deepEqualYaml(frontmatter, mergedData)) {\n return universalContent;\n }\n\n // If no frontmatter was present and merged result is empty, return original.\n if (rawFrontmatter === undefined && Object.keys(mergedData).length === 0) {\n return universalContent;\n }\n\n return composeMarkdown(mergedData, body);\n } catch {\n return universalContent;\n }\n}\n", "/**\n * Flow Key Mapper\n * \n * Implements sophisticated key transformations for the flow execution pipeline.\n * Supports dot notation, wildcard patterns, value transforms, lookup tables, and defaults.\n */\n\nimport { logger } from '../../utils/logger.js';\nimport { defaultTransformRegistry } from './flow-transforms.js';\nimport type { KeyMap, KeyMapConfig, FlowContext } from '../../types/flows.js';\n\n/**\n * Map a single key (simple or nested with dot notation)\n */\nfunction mapSingleKey(\n obj: any,\n sourceKey: string,\n targetConfig: string | KeyMapConfig,\n result: any,\n context: FlowContext,\n mappedKeys: Set<string>\n): void {\n const target = typeof targetConfig === 'string' ? targetConfig : targetConfig.to;\n const config = typeof targetConfig === 'object' ? 
targetConfig : undefined;\n\n // Get source value\n const value = getNestedValue(obj, sourceKey);\n\n // Track that we've processed this key\n markKeyAsMapped(sourceKey, mappedKeys);\n\n // Handle missing value\n if (value === undefined) {\n // Use default value if provided\n if (config?.default !== undefined) {\n const transformedValue = applyValueTransform(config.default, config, context);\n setNestedValue(result, target, transformedValue);\n } else if (config?.required) {\n logger.warn(`Required key missing: ${sourceKey}`);\n }\n return;\n }\n\n // Transform the value\n const transformedValue = applyValueTransform(value, config, context);\n\n // Skip if value is null/undefined and not required\n if ((transformedValue === null || transformedValue === undefined) && config?.required === false) {\n return;\n }\n\n // Set the transformed value in result\n setNestedValue(result, target, transformedValue);\n}\n\n/**\n * Map keys using wildcard patterns\n * \n * Examples:\n * - \"ai.*\" \u2192 \"cursor.*\" (map all keys under ai to cursor)\n * - \"servers.*\" \u2192 \"mcp.servers.*\" (map to nested path)\n */\nfunction mapWildcard(\n obj: any,\n pattern: string,\n targetConfig: string | KeyMapConfig,\n result: any,\n context: FlowContext,\n mappedKeys: Set<string>\n): void {\n const targetPattern = typeof targetConfig === 'string' ? targetConfig : targetConfig.to;\n const config = typeof targetConfig === 'object' ? 
targetConfig : undefined;\n\n // Parse pattern and target\n const { prefix: sourcePrefix, suffix: sourceSuffix } = parseWildcardPattern(pattern);\n const { prefix: targetPrefix, suffix: targetSuffix } = parseWildcardPattern(targetPattern);\n\n // Get all matching keys\n const matchingKeys = getMatchingKeys(obj, sourcePrefix, sourceSuffix);\n\n for (const key of matchingKeys) {\n // Extract the wildcard part\n const wildcardPart = extractWildcardPart(key, sourcePrefix, sourceSuffix);\n\n // Construct target key\n const targetKey = targetPrefix + wildcardPart + targetSuffix;\n\n // Get value\n const value = getNestedValue(obj, key);\n\n // Track as mapped\n markKeyAsMapped(key, mappedKeys);\n\n // Transform value\n if (value !== undefined) {\n const transformedValue = applyValueTransform(value, config, context);\n setNestedValue(result, targetKey, transformedValue);\n }\n }\n}\n\n/**\n * Parse wildcard pattern into prefix and suffix\n * \n * Examples:\n * - \"ai.*\" \u2192 { prefix: \"ai.\", suffix: \"\" }\n * - \"*.value\" \u2192 { prefix: \"\", suffix: \".value\" }\n * - \"servers.*.config\" \u2192 { prefix: \"servers.\", suffix: \".config\" }\n */\nfunction parseWildcardPattern(pattern: string): { prefix: string; suffix: string } {\n const wildcardIndex = pattern.indexOf('*');\n \n if (wildcardIndex === -1) {\n return { prefix: pattern, suffix: '' };\n }\n\n const prefix = pattern.substring(0, wildcardIndex);\n const suffix = pattern.substring(wildcardIndex + 1);\n\n return { prefix, suffix };\n}\n\n/**\n * Get all keys matching a wildcard pattern\n */\nfunction getMatchingKeys(obj: any, prefix: string, suffix: string): string[] {\n const flatKeys = getFlatKeys(obj);\n \n return flatKeys.filter(key => {\n if (prefix && !key.startsWith(prefix)) {\n return false;\n }\n if (suffix && !key.endsWith(suffix)) {\n return false;\n }\n return true;\n });\n}\n\n/**\n * Extract the wildcard part from a key\n */\nfunction extractWildcardPart(key: string, prefix: string, 
suffix: string): string {\n let result = key;\n \n if (prefix) {\n result = result.substring(prefix.length);\n }\n \n if (suffix) {\n result = result.substring(0, result.length - suffix.length);\n }\n \n return result;\n}\n\n/**\n * Get all flat keys (dot notation) from an object\n */\nfunction getFlatKeys(obj: any, prefix = ''): string[] {\n const keys: string[] = [];\n\n if (typeof obj !== 'object' || obj === null) {\n return [];\n }\n\n for (const [key, value] of Object.entries(obj)) {\n const fullKey = prefix ? `${prefix}.${key}` : key;\n keys.push(fullKey);\n\n // Recursively get nested keys\n if (typeof value === 'object' && value !== null && !Array.isArray(value)) {\n keys.push(...getFlatKeys(value, fullKey));\n }\n }\n\n return keys;\n}\n\n/**\n * Apply value transformations\n * \n * Applies transforms and value lookups in order:\n * 1. Apply value lookup table (if provided)\n * 2. Apply transform functions (if provided)\n */\nfunction applyValueTransform(\n value: any,\n config: KeyMapConfig | undefined,\n context: FlowContext\n): any {\n if (!config) {\n return value;\n }\n\n let result = value;\n\n // Apply value lookup table first\n if (config.values && result in config.values) {\n result = config.values[result];\n }\n\n // Apply transform functions\n if (config.transform) {\n result = applyTransforms(result, config.transform, context);\n }\n\n return result;\n}\n\n/**\n * Apply one or more transforms to a value\n */\nfunction applyTransforms(value: any, transforms: string | string[], context: FlowContext): any {\n const transformList = Array.isArray(transforms) ? transforms : [transforms];\n \n let result = value;\n\n for (const transformName of transformList) {\n try {\n // Check if transform exists\n if (defaultTransformRegistry.has(transformName)) {\n result = defaultTransformRegistry.execute(transformName, result);\n } else {\n logger.warn(`Unknown transform: ${transformName}`);\n }\n } catch (error) {\n const errorMsg = error instanceof Error ? 
error.message : String(error);\n logger.warn(`Transform '${transformName}' failed: ${errorMsg}`);\n // Continue with unmodified value on error\n }\n }\n\n return result;\n}\n\n/**\n * Get nested value using dot notation\n * \n * Examples:\n * - getNestedValue({ a: { b: 1 } }, \"a.b\") \u2192 1\n * - getNestedValue({ a: { b: 1 } }, \"a.c\") \u2192 undefined\n */\nexport function getNestedValue(obj: any, path: string): any {\n if (!path) {\n return obj;\n }\n\n const keys = path.split('.');\n let current = obj;\n\n for (const key of keys) {\n if (current && typeof current === 'object' && key in current) {\n current = current[key];\n } else {\n return undefined;\n }\n }\n\n return current;\n}\n\n/**\n * Set nested value using dot notation\n * \n * Examples:\n * - setNestedValue({}, \"a.b\", 1) \u2192 { a: { b: 1 } }\n * - setNestedValue({ a: {} }, \"a.b\", 1) \u2192 { a: { b: 1 } }\n */\nexport function setNestedValue(obj: any, path: string, value: any): void {\n if (!path) {\n return;\n }\n\n const keys = path.split('.');\n let current = obj;\n\n // Navigate/create nested structure\n for (let i = 0; i < keys.length - 1; i++) {\n const key = keys[i];\n \n if (!(key in current) || typeof current[key] !== 'object') {\n current[key] = {};\n }\n \n current = current[key];\n }\n\n // Set final value\n const finalKey = keys[keys.length - 1];\n current[finalKey] = value;\n}\n\n/**\n * Delete nested value using dot notation\n */\nexport function deleteNestedValue(obj: any, path: string): void {\n if (!path) {\n return;\n }\n\n const keys = path.split('.');\n let current = obj;\n\n // Navigate to parent\n for (let i = 0; i < keys.length - 1; i++) {\n const key = keys[i];\n \n if (!(key in current) || typeof current[key] !== 'object') {\n return; // Path doesn't exist\n }\n \n current = current[key];\n }\n\n // Delete final key\n const finalKey = keys[keys.length - 1];\n delete current[finalKey];\n}\n\n/**\n * Mark a key (and its nested parent keys) as mapped\n * \n * 
Examples:\n * - markKeyAsMapped(\"a.b.c\", set) adds \"a\", \"a.b\", \"a.b.c\" to set\n */\nfunction markKeyAsMapped(key: string, mappedKeys: Set<string>): void {\n mappedKeys.add(key);\n\n // Also mark parent keys as mapped\n const parts = key.split('.');\n for (let i = 1; i < parts.length; i++) {\n const parentKey = parts.slice(0, i).join('.');\n mappedKeys.add(parentKey);\n }\n}\n\n/**\n * Copy unmapped keys from source to result\n * \n * This preserves keys that are not explicitly mapped.\n */\nfunction copyUnmappedKeys(source: any, result: any, mappedKeys: Set<string>): void {\n if (typeof source !== 'object' || source === null) {\n return;\n }\n\n // If result is an array, don't copy unmapped keys\n if (Array.isArray(result)) {\n return;\n }\n\n for (const key of Object.keys(source)) {\n if (!mappedKeys.has(key) && !(key in result)) {\n result[key] = source[key];\n }\n }\n}\n\n\n", "/**\n * Flow Key Extractor\n * \n * Utilities for extracting and tracking keys contributed by flows.\n * Used for precise removal during uninstall of merged files.\n */\n\n/**\n * Extract all keys from an object (including nested paths with dot notation)\n * Used for tracking which keys a package contributes during merge operations\n * \n * @param data - The data object to extract keys from\n * @param prefix - The current path prefix (for recursion)\n * @returns Array of dot-notated key paths\n * \n * @example\n * extractAllKeys({ mcp: { server1: {}, server2: {} } })\n * // Returns: [\"mcp.server1\", \"mcp.server2\"]\n */\nexport function extractAllKeys(data: any, prefix: string = ''): string[] {\n if (typeof data !== 'object' || data === null) {\n return prefix ? [prefix] : [];\n }\n\n // Handle arrays - track the array itself, not individual elements\n if (Array.isArray(data)) {\n return prefix ? [prefix] : [];\n }\n\n const keys: string[] = [];\n for (const key of Object.keys(data)) {\n const fullPath = prefix ? 
`${prefix}.${key}` : key;\n\n if (typeof data[key] === 'object' && data[key] !== null && !Array.isArray(data[key])) {\n // Recurse into nested objects\n keys.push(...extractAllKeys(data[key], fullPath));\n } else {\n // Leaf key (primitive, array, or null)\n keys.push(fullPath);\n }\n }\n\n return keys;\n}\n\n/**\n * Delete a key from an object using dot notation\n * Also cleans up empty parent objects\n * \n * @param obj - The object to modify\n * @param keyPath - Dot-notated path to the key\n * \n * @example\n * const obj = { mcp: { server1: {}, server2: {} } };\n * deleteNestedKey(obj, \"mcp.server1\");\n * // obj is now: { mcp: { server2: {} } }\n */\nexport function deleteNestedKey(obj: any, keyPath: string): void {\n const parts = keyPath.split('.');\n const last = parts.pop()!;\n\n let current = obj;\n const path: any[] = [obj];\n \n for (const part of parts) {\n if (!current[part] || typeof current[part] !== 'object') {\n return; // Path doesn't exist\n }\n current = current[part];\n path.push(current);\n }\n\n delete current[last];\n\n // Clean up empty parent objects (bottom-up)\n for (let i = path.length - 1; i > 0; i--) {\n const parent = path[i];\n if (Object.keys(parent).length === 0) {\n // Find the key in the parent's parent\n const grandparent = path[i - 1];\n const keyInGrandparent = parts[i - 1];\n if (keyInGrandparent) {\n delete grandparent[keyInGrandparent];\n }\n } else {\n break; // Stop when we find a non-empty parent\n }\n }\n}\n\n/**\n * Check if an object is effectively empty (recursively)\n * \n * @param data - The data to check\n * @returns True if empty or all nested objects are empty\n */\nexport function isEffectivelyEmpty(data: any): boolean {\n if (data === null || data === undefined) return true;\n if (typeof data !== 'object') return false;\n if (Array.isArray(data)) return data.length === 0;\n\n const keys = Object.keys(data);\n if (keys.length === 0) return true;\n\n // Check if all nested objects are also empty\n return 
keys.every(key => isEffectivelyEmpty(data[key]));\n}\n", "/**\n * Source Pattern Resolver\n * \n * Resolves source file patterns (including arrays with priority) to actual file paths.\n * Supports glob patterns and priority-based pattern selection.\n */\n\nimport { promises as fs } from 'fs';\nimport path from 'path';\nimport { minimatch } from 'minimatch';\nimport * as fsUtils from '../../utils/fs.js';\nimport { logger } from '../../utils/logger.js';\n\n/**\n * Result of source pattern resolution\n */\nexport interface SourceResolutionResult {\n /** Resolved file paths that matched */\n paths: string[];\n \n /** Warnings generated during resolution */\n warnings: string[];\n \n /** Pattern that matched (for multi-pattern scenarios) */\n matchedPattern?: string;\n \n /** Patterns that were skipped due to lower priority */\n skippedPatterns?: string[];\n}\n\n/**\n * Options for source resolution\n */\nexport interface SourceResolutionOptions {\n /** Base directory for resolving relative patterns */\n baseDir: string;\n \n /** Whether to log warnings for skipped patterns */\n logWarnings?: boolean;\n}\n\n/**\n * Source Pattern Resolver\n * \n * Handles resolution of single patterns, arrays of patterns (with priority),\n * and glob patterns to actual file paths.\n */\nexport class SourcePatternResolver {\n /**\n * Resolve source pattern(s) to file paths\n * \n * For single patterns: returns all matching files\n * For array patterns: returns files from first matching pattern only (priority order)\n * \n * @param pattern - Single pattern or array of patterns with priority\n * @param options - Resolution options\n * @returns Resolution result with paths and warnings\n */\n async resolve(\n pattern: string | string[],\n options: SourceResolutionOptions\n ): Promise<SourceResolutionResult> {\n // Handle array of patterns with priority\n if (Array.isArray(pattern)) {\n return this.resolveWithPriority(pattern, options);\n }\n \n // Single pattern\n const paths = await 
this.resolveSinglePattern(pattern, options.baseDir);\n return {\n paths,\n warnings: [],\n };\n }\n\n /**\n * Resolve array of patterns with priority (first match wins)\n * \n * Iterates through patterns in order, returning files from first matching pattern.\n * Logs warnings if lower-priority patterns also match.\n * \n * @param patterns - Array of patterns in priority order\n * @param options - Resolution options\n * @returns Resolution result\n */\n private async resolveWithPriority(\n patterns: string[],\n options: SourceResolutionOptions\n ): Promise<SourceResolutionResult> {\n if (patterns.length === 0) {\n return { paths: [], warnings: ['Empty pattern array provided'] };\n }\n\n const warnings: string[] = [];\n const skippedPatterns: string[] = [];\n\n // Try each pattern in priority order\n for (let i = 0; i < patterns.length; i++) {\n const currentPattern = patterns[i];\n const matches = await this.resolveSinglePattern(currentPattern, options.baseDir);\n\n if (matches.length > 0) {\n // Found matches - check if lower-priority patterns also match\n for (let j = i + 1; j < patterns.length; j++) {\n const lowerPriorityPattern = patterns[j];\n const lowerMatches = await this.resolveSinglePattern(\n lowerPriorityPattern,\n options.baseDir\n );\n\n if (lowerMatches.length > 0) {\n const warning = `Pattern \"${currentPattern}\" matched (priority ${i + 1}). ` +\n `Ignoring lower-priority pattern \"${lowerPriorityPattern}\" (priority ${j + 1}) ` +\n `which also matched ${lowerMatches.length} file(s).`;\n \n warnings.push(warning);\n skippedPatterns.push(lowerPriorityPattern);\n\n if (options.logWarnings !== false) {\n logger.debug(warning);\n }\n }\n }\n\n return {\n paths: matches,\n warnings,\n matchedPattern: currentPattern,\n skippedPatterns: skippedPatterns.length > 0 ? 
skippedPatterns : undefined,\n };\n }\n }\n\n // No patterns matched\n return {\n paths: [],\n warnings: [`No files matched any of the ${patterns.length} pattern(s): ${patterns.join(', ')}`],\n };\n }\n\n /**\n * Resolve a single pattern to file paths\n * \n * Handles both glob patterns and literal file paths.\n * \n * @param pattern - File pattern (may contain globs)\n * @param baseDir - Base directory for resolution\n * @returns Array of resolved file paths\n */\n private async resolveSinglePattern(pattern: string, baseDir: string): Promise<string[]> {\n // Check if pattern contains glob wildcard\n if (this.isGlobPattern(pattern)) {\n return this.resolveGlobPattern(pattern, baseDir);\n }\n\n // Literal file path - check if it exists\n const resolved = path.join(baseDir, pattern);\n const exists = await fsUtils.exists(resolved);\n\n return exists ? [resolved] : [];\n }\n\n /**\n * Check if pattern contains glob wildcards\n * \n * @param pattern - Pattern to check\n * @returns True if pattern contains glob syntax\n */\n private isGlobPattern(pattern: string): boolean {\n return pattern.includes('*') || pattern.includes('?') || pattern.includes('[');\n }\n\n /**\n * Resolve glob pattern to matching files\n * \n * @param pattern - Glob pattern\n * @param baseDir - Base directory\n * @returns Array of matching file paths\n */\n private async resolveGlobPattern(pattern: string, baseDir: string): Promise<string[]> {\n const matches: string[] = [];\n\n // Extract directory and file pattern\n const parts = pattern.split('/');\n const globPart = parts.findIndex(p => this.isGlobPattern(p));\n\n if (globPart === -1) {\n // No glob found (shouldn't happen, but handle gracefully)\n return [path.join(baseDir, pattern)];\n }\n\n // Build directory path up to first glob\n const dirPath = path.join(baseDir, ...parts.slice(0, globPart));\n const filePattern = parts.slice(globPart).join('/');\n\n // Check if directory exists\n if (!await fsUtils.exists(dirPath)) {\n return [];\n 
}\n\n // Recursively find matching files\n await this.findMatchingFiles(dirPath, filePattern, baseDir, matches);\n\n return matches;\n }\n\n /**\n * Recursively find files matching glob pattern\n * \n * Supports ** for recursive directory matching.\n * \n * @param dir - Current directory\n * @param pattern - Pattern to match\n * @param baseDir - Base directory for relative paths\n * @param matches - Array to accumulate matches\n */\n private async findMatchingFiles(\n dir: string,\n pattern: string,\n baseDir: string,\n matches: string[]\n ): Promise<void> {\n try {\n const entries = await fs.readdir(dir, { withFileTypes: true });\n\n for (const entry of entries) {\n const fullPath = path.join(dir, entry.name);\n const relativePath = path.relative(baseDir, fullPath);\n\n if (entry.isDirectory()) {\n // Always recurse for ** patterns\n if (pattern.startsWith('**') || pattern.includes('/**/')) {\n await this.findMatchingFiles(fullPath, pattern, baseDir, matches);\n } else if (pattern.includes('/')) {\n // For patterns with subdirs, continue searching\n await this.findMatchingFiles(fullPath, pattern, baseDir, matches);\n }\n } else if (entry.isFile()) {\n // Test file against pattern\n if (minimatch(relativePath, pattern, { dot: false })) {\n matches.push(fullPath);\n }\n }\n }\n } catch (error) {\n // Ignore errors (directory not accessible, etc.)\n logger.debug(`Error reading directory ${dir}: ${error}`);\n }\n }\n}\n\n/**\n * Create a source pattern resolver instance\n */\nexport function createSourceResolver(): SourcePatternResolver {\n return new SourcePatternResolver();\n}\n", "/**\n * Source Schema Validator\n *\n * Validates parsed frontmatter against a schema's property type declarations.\n * Used at the export pipeline entry to warn when source data doesn't match\n * the declared universal schema (e.g., tools is a string instead of an array).\n */\n\nimport type { DetectionSchema, SchemaProperty } from '../install/detection-types.js';\n\n/**\n * Validate 
parsed frontmatter against a schema's property type declarations.\n * Returns an array of human-readable warning strings.\n *\n * Only checks type mismatches for properties that exist in both the data and the schema.\n * Does NOT enforce required fields or complex constraints.\n *\n * @param data - Parsed frontmatter object\n * @param schema - Detection schema with property type declarations\n * @param filePath - Source file path for warning messages\n * @returns Array of warning strings (empty if valid)\n */\nexport function validateFrontmatterAgainstSchema(\n data: Record<string, any>,\n schema: DetectionSchema,\n filePath: string,\n): string[] {\n const warnings: string[] = [];\n if (!schema.properties) return warnings;\n\n for (const [key, schemaProp] of Object.entries(schema.properties)) {\n if (!(key in data)) continue;\n const warning = checkTypeMatch(data[key], schemaProp, key, filePath);\n if (warning) {\n warnings.push(warning);\n }\n }\n return warnings;\n}\n\n/**\n * Check if a value's runtime type matches the schema's declared type.\n */\nfunction checkTypeMatch(\n value: any,\n schemaProp: SchemaProperty,\n fieldName: string,\n filePath: string,\n): string | null {\n if (!schemaProp.type) return null;\n\n const expectedTypes = Array.isArray(schemaProp.type)\n ? 
schemaProp.type\n : [schemaProp.type];\n\n const actualType = getJsonSchemaType(value);\n\n if (!expectedTypes.includes(actualType)) {\n return `${filePath}: \"${fieldName}\" is ${actualType}, expected ${expectedTypes.join(' | ')}`;\n }\n return null;\n}\n\n/**\n * Map a JavaScript value to its JSON Schema type name.\n */\nfunction getJsonSchemaType(value: any): string {\n if (value === null) return 'null';\n if (Array.isArray(value)) return 'array';\n return typeof value; // 'string', 'number', 'boolean', 'object'\n}\n", "/**\n * Resource Query\n *\n * Parse user input into name + optional type filter for resource lookups.\n * Extracted from which-pipeline.ts for reuse across resource-spec, list, etc.\n */\n\nimport { normalizeType } from './resource-registry.js';\n\n// ---------------------------------------------------------------------------\n// Types\n// ---------------------------------------------------------------------------\n\nexport interface ResourceQuery {\n /** Raw input from user */\n raw: string;\n /** Extracted resource name */\n name: string;\n /** Optional type filter from qualified input (e.g. 
\"skills/skill-dev\" \u2192 \"skill\") */\n typeFilter?: string;\n}\n\n// ---------------------------------------------------------------------------\n// Query parsing\n// ---------------------------------------------------------------------------\n\n/**\n * Parse a user query into name + optional type filter.\n *\n * - Bare name: `skill-dev` \u2192 { name: \"skill-dev\" }\n * - Qualified: `skills/skill-dev` \u2192 { name: \"skill-dev\", typeFilter: \"skill\" }\n */\nexport function parseResourceQuery(input: string): ResourceQuery {\n const slashIndex = input.indexOf('/');\n if (slashIndex === -1) {\n return { raw: input, name: input };\n }\n\n const prefix = input.slice(0, slashIndex);\n const name = input.slice(slashIndex + 1);\n\n if (!name) {\n return { raw: input, name: input };\n }\n\n const typeFilter = normalizeType(prefix);\n // If normalizeType falls back to 'other', the prefix wasn't a known type\n if (typeFilter === 'other') {\n return { raw: input, name: input };\n }\n\n return { raw: input, name, typeFilter };\n}\n", "/**\n * Untracked Files Scanner\n * \n * Discovers files in workspace that match platform patterns but are not tracked\n * in the workspace index (.openpackage/openpackage.index.yml).\n * \n * Uses static-prefix extraction to determine minimal walk roots from patterns,\n * then delegates to fast-glob for efficient, bounded directory traversal.\n * This prevents unbounded walks when workspaceRoot is ~ (home directory).\n */\n\nimport { basename, join } from 'path';\nimport { homedir } from 'os';\nimport fg from 'fast-glob';\nimport { minimatch } from 'minimatch';\nimport { isJunk } from 'junk';\nimport type { Platform } from '../platforms.js';\nimport { getDetectedPlatforms, getPlatformDefinition } from '../platforms.js';\nimport { readWorkspaceIndex } from '../../utils/workspace-index-yml.js';\nimport { resolveDeclaredPath } from '../../utils/path-resolution.js';\nimport { normalizePathForProcessing } from 
'../../utils/path-normalization.js';\nimport { normalizePlatforms } from '../platform/platform-mapper.js';\nimport { logger } from '../../utils/logger.js';\nimport type { Flow, SwitchExpression } from '../../types/flows.js';\n\n/**\n * Represents a file discovered in the workspace but not tracked in the index\n */\nexport interface UntrackedFile {\n /** Absolute path to the file */\n absolutePath: string;\n /** Path relative to workspace root */\n workspacePath: string;\n /** Platform that detected this file */\n platform: Platform;\n /** Flow pattern that matched this file */\n flowPattern: string;\n /** Category derived from pattern (rules, commands, agents, etc.) */\n category: string;\n}\n\n/**\n * Result of scanning for untracked files\n */\nexport interface UntrackedScanResult {\n /** All untracked files discovered */\n files: UntrackedFile[];\n /** Files grouped by platform */\n platformGroups: Map<Platform, UntrackedFile[]>;\n /** Files grouped by category */\n categoryGroups: Map<string, UntrackedFile[]>;\n /** Total count of untracked files */\n totalFiles: number;\n}\n\n/**\n * Pattern info extracted from a flow\n */\ninterface PatternInfo {\n pattern: string;\n platform: Platform;\n flow: Flow;\n category: string;\n}\n\n/**\n * Scan workspace for files that match platform patterns but are not tracked in index\n * \n * @param workspaceRoot - Root directory of the workspace\n * @param platformFilter - Optional array of platform names to filter by\n * @returns Scan result with all untracked files\n */\nexport async function scanUntrackedFiles(\n workspaceRoot: string,\n platformFilter?: string[]\n): Promise<UntrackedScanResult> {\n logger.debug('Starting untracked files scan', { workspaceRoot, platformFilter });\n\n // Step 1: Detect platforms in workspace\n let platforms = await getDetectedPlatforms(workspaceRoot);\n \n // Apply platform filter if specified\n const normalizedFilter = normalizePlatforms(platformFilter);\n if (normalizedFilter && 
normalizedFilter.length > 0) {\n platforms = platforms.filter(p => normalizedFilter.includes(p.toLowerCase()));\n logger.debug(`Filtered to platforms: ${platforms.join(', ') || 'none'}`);\n } else {\n logger.debug(`Detected platforms: ${platforms.join(', ') || 'none'}`);\n }\n \n if (platforms.length === 0) {\n logger.debug('No platforms detected in workspace');\n return createEmptyResult();\n }\n\n // Step 2: Extract patterns from platform flows\n const patterns = extractPatternsFromPlatforms(platforms, workspaceRoot);\n logger.debug(`Extracted ${patterns.length} patterns from ${platforms.length} platforms`);\n\n // Step 3: Discover files matching patterns\n const discoveredFiles = await discoverFilesFromPatterns(patterns, workspaceRoot);\n logger.debug(`Discovered ${discoveredFiles.size} unique files from patterns`);\n\n // Step 4: Load tracked files from workspace index\n const trackedPaths = await loadTrackedFilePaths(workspaceRoot);\n logger.debug(`Loaded ${trackedPaths.size} tracked files from index`);\n\n // Step 5: Filter to untracked files only\n const untrackedFiles = filterUntrackedFiles(discoveredFiles, trackedPaths);\n logger.debug(`Filtered to ${untrackedFiles.length} untracked files`);\n\n // Step 6: Group results\n return groupUntrackedFiles(untrackedFiles);\n}\n\n/**\n * Extract patterns from all platform export flows\n * Export flows represent package \u2192 workspace direction (the 'to' field is workspace location)\n */\nfunction extractPatternsFromPlatforms(\n platforms: Platform[],\n workspaceRoot: string\n): PatternInfo[] {\n const patterns: PatternInfo[] = [];\n\n for (const platform of platforms) {\n const definition = getPlatformDefinition(platform, workspaceRoot);\n \n // Process export flows (these define workspace file locations)\n for (const flow of definition.export) {\n if (flow.fallback) continue; // Catch-all flows don't define discoverable locations\n const patternStrings = extractToPatterns(flow, workspaceRoot);\n \n for (const 
pattern of patternStrings) {\n // Extract category from the 'from' field (universal format) instead of 'to'\n const category = extractCategoryFromFlow(flow);\n patterns.push({\n pattern,\n platform,\n flow,\n category\n });\n }\n }\n }\n\n return patterns;\n}\n\n/**\n * Extract category from flow's 'from' pattern (universal format)\n * The 'from' field represents the universal resource type which is consistent\n * across all platforms, unlike the 'to' field which is platform-specific.\n */\nfunction extractCategoryFromFlow(flow: Flow): string {\n const fromField = flow.from;\n \n // Handle string pattern\n let fromPattern: string;\n if (typeof fromField === 'string') {\n fromPattern = fromField;\n } else if (Array.isArray(fromField)) {\n // Use first pattern from array\n fromPattern = fromField[0] || '';\n } else if (typeof fromField === 'object' && '$switch' in fromField) {\n // For switch expressions, we can't determine the exact pattern without context\n // Fall back to extracting from all case patterns\n const switchExpr = fromField as SwitchExpression;\n \n // Try default first\n if (switchExpr.$switch.default) {\n if (typeof switchExpr.$switch.default === 'string') {\n fromPattern = switchExpr.$switch.default;\n } else if (typeof switchExpr.$switch.default === 'object' && 'pattern' in switchExpr.$switch.default) {\n fromPattern = switchExpr.$switch.default.pattern;\n } else {\n return 'other';\n }\n } else if (switchExpr.$switch.cases && switchExpr.$switch.cases.length > 0) {\n // Use first case as fallback\n const firstCase = switchExpr.$switch.cases[0].value;\n if (typeof firstCase === 'string') {\n fromPattern = firstCase;\n } else if (typeof firstCase === 'object' && 'pattern' in firstCase) {\n fromPattern = firstCase.pattern;\n } else {\n return 'other';\n }\n } else {\n return 'other';\n }\n } else if (typeof fromField === 'object' && 'pattern' in fromField) {\n // Handle object with pattern field (e.g., { pattern: \"agents/**/*.md\", schema: \"...\" 
})\n const patternObj = fromField as { pattern: string };\n fromPattern = patternObj.pattern;\n } else {\n return 'other';\n }\n \n // Extract first directory component from pattern\n // Examples:\n // \"skills/**/*\" \u2192 \"skills\"\n // \"rules/**/*.md\" \u2192 \"rules\"\n // \"commands/**/*.md\" \u2192 \"commands\"\n // \"mcp.jsonc\" \u2192 \"mcps\"\n // \"AGENTS.md\" \u2192 \"agents\"\n const parts = fromPattern.split('/');\n const firstPart = parts[0];\n \n // Handle file-based resources\n if (firstPart.includes('.')) {\n // Special cases for known files\n if (firstPart === 'mcp.jsonc' || firstPart === 'mcp.json') {\n return 'mcps';\n }\n if (firstPart === 'AGENTS.md') {\n return 'agents';\n }\n if (firstPart === 'CLAUDE.md' || firstPart === 'QWEN.md' || firstPart === 'WARP.md') {\n return 'agents';\n }\n // For other files, extract base name\n const baseName = firstPart.replace(/\\.[^.]+$/, '').toLowerCase();\n return baseName || 'other';\n }\n \n // For directory-based resources, return the directory name\n return firstPart || 'other';\n}\n\n/**\n * Extract 'to' patterns from a flow (handling switch expressions)\n * Resolves switch expressions based on workspaceRoot context\n */\nfunction extractToPatterns(flow: Flow, workspaceRoot: string): string[] {\n const toField = flow.to;\n\n // Handle switch expressions - resolve based on workspaceRoot\n if (typeof toField === 'object' && '$switch' in toField) {\n const switchExpr = toField as SwitchExpression;\n const patterns: string[] = [];\n \n // Check if we're in global mode\n const normalizedRoot = workspaceRoot.replace(/\\/+$/, '');\n const isGlobal = normalizedRoot === homedir();\n \n // Try to match a case first\n if (switchExpr.$switch.cases) {\n for (const caseItem of switchExpr.$switch.cases) {\n // Check if this case matches our context\n if (caseItem.pattern === '~/' && isGlobal) {\n // Extract the actual pattern value\n if (typeof caseItem.value === 'string') {\n return [caseItem.value];\n } else if 
(typeof caseItem.value === 'object' && 'pattern' in caseItem.value) {\n return [caseItem.value.pattern];\n }\n }\n }\n }\n \n // Fall back to default\n if (switchExpr.$switch.default) {\n if (typeof switchExpr.$switch.default === 'string') {\n return [switchExpr.$switch.default];\n } else if (typeof switchExpr.$switch.default === 'object' && 'pattern' in switchExpr.$switch.default) {\n return [switchExpr.$switch.default.pattern];\n }\n }\n \n // If no match, collect all possible patterns for safety\n if (switchExpr.$switch.cases) {\n for (const caseItem of switchExpr.$switch.cases) {\n if (typeof caseItem.value === 'string') {\n patterns.push(caseItem.value);\n } else if (typeof caseItem.value === 'object' && 'pattern' in caseItem.value) {\n patterns.push(caseItem.value.pattern);\n }\n }\n }\n \n return patterns;\n }\n\n // Handle object with pattern field\n if (typeof toField === 'object' && 'pattern' in toField && typeof toField.pattern === 'string') {\n return [toField.pattern];\n }\n\n // Handle string pattern\n if (typeof toField === 'string') {\n return [toField];\n }\n\n // Handle array patterns\n if (Array.isArray(toField)) {\n return toField.filter((p): p is string => typeof p === 'string');\n }\n\n return [];\n}\n\n/**\n * Extract category from pattern (e.g., \"rules\", \"commands\", \"agents\")\n */\nfunction extractCategoryFromPattern(pattern: string): string {\n // Remove leading dot and platform root dir (e.g., \".claude/rules/...\" -> \"rules\")\n const normalized = pattern.replace(/^\\.[^/]+\\//, '');\n \n // Extract first directory component\n const parts = normalized.split('/');\n if (parts.length > 1) {\n return parts[0];\n }\n \n // For root-level files, use \"config\" or filename without extension\n if (pattern.includes('.')) {\n const filename = pattern.split('/').pop() || pattern;\n const baseName = filename.replace(/^\\.[^.]*\\./, '').split('.')[0];\n return baseName || 'config';\n }\n \n return 'other';\n}\n\n/**\n * Extract the static 
(non-glob) prefix directory from a pattern.\n * Consumes path segments until a segment containing glob metacharacters is found.\n * \n * Examples:\n * \".claude/rules/*.md\" -> { root: \".claude/rules\", rootOnly: false }\n * \".cursor/rules/*.md\" -> { root: \".cursor/rules\", rootOnly: false }\n * \"AGENTS.md\" -> { root: null, rootOnly: true }\n * \"**\\/*.md\" -> { root: null, rootOnly: false } (unsafe)\n */\nexport function extractStaticWalkRoot(pattern: string): { root: string | null; rootOnly: boolean } {\n const normalized = pattern.replace(/\\\\/g, '/');\n\n if (!normalized.includes('/')) {\n return { root: null, rootOnly: true };\n }\n\n const segments = normalized.split('/');\n const hasGlobMeta = (seg: string) => /[*?\\[\\]{}()!+@]/.test(seg);\n const staticSegments: string[] = [];\n\n for (const seg of segments) {\n if (!seg) continue;\n if (hasGlobMeta(seg)) break;\n staticSegments.push(seg);\n }\n\n if (staticSegments.length === 0) {\n return { root: null, rootOnly: false };\n }\n\n return { root: staticSegments.join('/'), rootOnly: false };\n}\n\nconst IGNORED_DIRS = ['**/.openpackage/**', '**/node_modules/**', '**/.git/**'];\n\n/**\n * Discover files matching all patterns using fast-glob with static-prefix scoping.\n *\n * Strategy:\n * 1. Extract static walk roots from each pattern to avoid unbounded traversal\n * 2. Group patterns by walk root for efficient single-pass scanning\n * 3. Use fast-glob scoped to each walk root\n * 4. For root-only patterns (e.g. \"AGENTS.md\"), scan only immediate children\n * 5. 
Always skip unbounded patterns (no static prefix) \u2014 fallback flows are filtered earlier\n */\nasync function discoverFilesFromPatterns(\n patterns: PatternInfo[],\n workspaceRoot: string\n): Promise<Map<string, UntrackedFile>> {\n const filesMap = new Map<string, UntrackedFile>();\n\n const rootOnlyPatterns: PatternInfo[] = [];\n const unsafePatterns: PatternInfo[] = [];\n const rootedGroups = new Map<string, PatternInfo[]>();\n\n for (const patternInfo of patterns) {\n const { root, rootOnly } = extractStaticWalkRoot(patternInfo.pattern);\n\n if (rootOnly) {\n rootOnlyPatterns.push(patternInfo);\n } else if (root === null) {\n unsafePatterns.push(patternInfo);\n } else {\n if (!rootedGroups.has(root)) {\n rootedGroups.set(root, []);\n }\n rootedGroups.get(root)!.push(patternInfo);\n }\n }\n\n if (unsafePatterns.length > 0) {\n logger.debug(\n `Skipping ${unsafePatterns.length} unbounded patterns (no static walk root): ` +\n unsafePatterns.map(p => p.pattern).join(', ')\n );\n }\n\n if (rootOnlyPatterns.length > 0) {\n try {\n const rootLevelGlobs = rootOnlyPatterns.map(p => p.pattern);\n const matched = await fg(rootLevelGlobs, {\n cwd: workspaceRoot,\n dot: true,\n onlyFiles: true,\n deep: 1,\n ignore: IGNORED_DIRS,\n });\n\n for (const relativePath of matched) {\n addMatchToMap(filesMap, relativePath, rootOnlyPatterns, workspaceRoot);\n }\n } catch (error) {\n logger.debug('Error scanning root-only patterns', { error });\n }\n }\n\n for (const [root, groupPatterns] of rootedGroups) {\n try {\n const scopedGlobs = groupPatterns.map(p => {\n if (root && p.pattern.startsWith(root + '/')) {\n return p.pattern.slice(root.length + 1);\n }\n return p.pattern;\n });\n\n const matched = await fg(scopedGlobs, {\n cwd: root ? join(workspaceRoot, root) : workspaceRoot,\n dot: true,\n onlyFiles: true,\n ignore: IGNORED_DIRS,\n });\n\n for (const matchedRelative of matched) {\n const relativePath = root ? 
`${root}/${matchedRelative}` : matchedRelative;\n addMatchToMap(filesMap, relativePath, groupPatterns, workspaceRoot);\n }\n } catch (error) {\n logger.debug(`Error scanning rooted group \"${root}\"`, { error });\n }\n }\n\n return filesMap;\n}\n\n/**\n * Add a matched file path to the results map, attributing it to the first matching pattern\n */\nfunction addMatchToMap(\n filesMap: Map<string, UntrackedFile>,\n relativePath: string,\n patterns: PatternInfo[],\n workspaceRoot: string\n): void {\n const filename = basename(relativePath);\n if (isJunk(filename)) return;\n\n const absolutePath = join(workspaceRoot, relativePath);\n const normalizedPath = normalizePathForProcessing(absolutePath);\n\n if (filesMap.has(normalizedPath)) return;\n\n const matchingPattern = patterns.find(p =>\n minimatch(relativePath, p.pattern, { dot: true })\n ) || patterns[0];\n\n filesMap.set(normalizedPath, {\n absolutePath: normalizedPath,\n workspacePath: normalizePathForProcessing(relativePath),\n platform: matchingPattern.platform,\n flowPattern: matchingPattern.pattern,\n category: matchingPattern.category,\n });\n}\n\n/**\n * Load all tracked file paths from workspace index\n * Returns Set of normalized absolute paths\n */\nasync function loadTrackedFilePaths(workspaceRoot: string): Promise<Set<string>> {\n const trackedPaths = new Set<string>();\n\n try {\n const { index } = await readWorkspaceIndex(workspaceRoot);\n \n // Extract all target paths from all packages\n for (const [packageName, packageData] of Object.entries(index.packages)) {\n const filesMapping = packageData.files || {};\n \n for (const [sourceKey, targets] of Object.entries(filesMapping)) {\n if (!Array.isArray(targets)) continue;\n \n for (const target of targets) {\n // Handle both string and object mappings\n const targetPath = typeof target === 'string' ? 
target : target.target;\n \n // Resolve target path to absolute\n const resolved = resolveDeclaredPath(targetPath, workspaceRoot);\n const normalized = normalizePathForProcessing(resolved.absolute);\n trackedPaths.add(normalized);\n }\n }\n }\n } catch (error) {\n logger.debug('Failed to load workspace index', { error });\n }\n\n return trackedPaths;\n}\n\n/**\n * Filter discovered files to only untracked ones\n */\nfunction filterUntrackedFiles(\n discoveredFiles: Map<string, UntrackedFile>,\n trackedPaths: Set<string>\n): UntrackedFile[] {\n const untracked: UntrackedFile[] = [];\n\n for (const [absolutePath, fileInfo] of discoveredFiles) {\n const normalized = normalizePathForProcessing(absolutePath);\n \n if (!trackedPaths.has(normalized)) {\n untracked.push(fileInfo);\n }\n }\n\n // Sort by workspace path for consistent output\n return untracked.sort((a, b) => a.workspacePath.localeCompare(b.workspacePath));\n}\n\n/**\n * Group untracked files by platform and category\n */\nfunction groupUntrackedFiles(files: UntrackedFile[]): UntrackedScanResult {\n const platformGroups = new Map<Platform, UntrackedFile[]>();\n const categoryGroups = new Map<string, UntrackedFile[]>();\n\n for (const file of files) {\n // Group by platform\n if (!platformGroups.has(file.platform)) {\n platformGroups.set(file.platform, []);\n }\n platformGroups.get(file.platform)!.push(file);\n\n // Group by category\n if (!categoryGroups.has(file.category)) {\n categoryGroups.set(file.category, []);\n }\n categoryGroups.get(file.category)!.push(file);\n }\n\n return {\n files,\n platformGroups,\n categoryGroups,\n totalFiles: files.length\n };\n}\n\n/**\n * Create an empty result\n */\nfunction createEmptyResult(): UntrackedScanResult {\n return {\n files: [],\n platformGroups: new Map(),\n categoryGroups: new Map(),\n totalFiles: 0\n };\n}\n", "/**\n * Resource Namespace Module\n *\n * Single source of truth for deriving category/namespace from paths.\n * Used by list pipeline, scope merger, 
and future consumers.\n */\n\nimport { stripExtension } from './resource-naming.js';\nimport { getResourceTypeDef, getMarkerFilename, findMarkerIndex, toPluralKey, type ResourceTypeId } from './resource-registry.js';\nimport { stripPlatformSuffixFromFilename } from '../flows/platform-suffix-handler.js';\n\n/**\n * Extract the path segment under a category directory from a full path.\n * Handles both source keys (rules/foo.mdc) and workspace paths (.cursor/rules/foo.mdc).\n *\n * @param path - Normalized path (source key, target, or workspace path)\n * @param categoryDir - The category directory (e.g., 'rules', 'agents')\n * @returns Path under category, or null if category not found\n */\nexport function getPathUnderCategory(path: string, categoryDir: string): string | null {\n const normalized = path.replace(/\\\\/g, '/').replace(/\\/$/, '');\n const parts = normalized.split('/');\n\n const idx = parts.indexOf(categoryDir);\n if (idx < 0) return null;\n\n const remaining = parts.slice(idx + 1);\n return remaining.length > 0 ? remaining.join('/') : '';\n}\n\n/**\n * Derive the namespace (path under category with extension stripped from last segment).\n * - File-based: \"basics/custom-rules.mdc\" \u2192 \"basics/custom-rules\"\n * - Skill: \"my-skill/readme.md\" \u2192 \"my-skill\" (first segment, directory-based)\n *\n * @param pathUnderCategory - Path under the category directory\n * @param resourceType - Singular type: rule, agent, skill, etc.\n */\nfunction deriveNamespace(pathUnderCategory: string, resourceType: ResourceTypeId): string {\n if (!pathUnderCategory || pathUnderCategory === '') return 'unnamed';\n\n const parts = pathUnderCategory.split('/');\n\n const marker = getMarkerFilename(resourceType);\n if (marker) {\n return deriveMarkerNamespace(parts, marker);\n }\n\n // Strip platform suffix (e.g. 
git-manager.opencode.md -> git-manager.md) so platform-specific\n // variants group under the same resource\n const pathStripped = stripPlatformSuffixFromFilename(pathUnderCategory);\n const strippedParts = pathStripped.split('/');\n const lastSegment = strippedParts[strippedParts.length - 1] ?? '';\n const nameWithoutExt = stripExtension(lastSegment);\n\n if (strippedParts.length === 1) {\n return nameWithoutExt || lastSegment;\n }\n\n const subpath = strippedParts.slice(0, -1).join('/');\n return subpath ? `${subpath}/${nameWithoutExt}` : nameWithoutExt;\n}\n\n/**\n * Derive the full resource identifier (category/namespace) from a path.\n *\n * @param path - Source key, target path, or workspace path\n * @param resourceType - Singular type: rule, agent, skill, command, hook, mcp, other\n * @returns Full name like \"rules/custom-rules\", \"rules/basics/custom-rules\", \"agents/agent-creator\"\n */\nexport function deriveResourceFullName(path: string, resourceType: ResourceTypeId): string {\n const normalizedType = resourceType as ResourceTypeId;\n\n if (normalizedType === 'mcp') {\n return 'mcps/configs';\n }\n\n if (normalizedType === 'other') {\n return 'other';\n }\n\n const def = getResourceTypeDef(normalizedType);\n const categoryDir = def.dirName;\n\n if (!categoryDir) {\n return `other/${deriveNamespace(path, 'other')}`;\n }\n\n const pathUnder = getPathUnderCategory(path, categoryDir);\n if (pathUnder === null) {\n const pluralKey = toPluralKey(normalizedType);\n const fallback = path.replace(/\\\\/g, '/').split('/').pop() ?? 'unnamed';\n return `${pluralKey}/${stripExtension(fallback)}`;\n }\n\n const namespace = deriveNamespace(pathUnder, normalizedType);\n const pluralKey = toPluralKey(normalizedType);\n return `${pluralKey}/${namespace}`;\n}\n\n/**\n * Derive namespace for a marker-based resource type from its path segments.\n *\n * Marker-based resources (e.g. 
skills with SKILL.md) use the parent directory\n * of the marker file as the namespace boundary. For paths containing the marker,\n * everything before it is the namespace. For other files, falls back to the\n * first segment.\n */\nfunction deriveMarkerNamespace(parts: string[], marker: string): string {\n const idx = findMarkerIndex(parts, marker);\n if (idx > 0) {\n return parts.slice(0, idx).join('/');\n }\n return parts[0] || 'unnamed';\n}\n\n/**\n * Scan a list of paths and extract namespace boundaries by finding marker\n * file entries for the given resource type.\n *\n * Returns boundaries sorted longest-first so callers can match greedily.\n */\nexport function buildMarkerBoundaries(paths: string[], resourceType: ResourceTypeId): string[] {\n const marker = getMarkerFilename(resourceType);\n if (!marker) return [];\n\n const def = getResourceTypeDef(resourceType);\n const categoryDir = def.dirName;\n if (!categoryDir) return [];\n\n const boundaries = new Set<string>();\n for (const p of paths) {\n const pathUnder = getPathUnderCategory(p, categoryDir);\n if (pathUnder === null) continue;\n const parts = pathUnder.split('/');\n const idx = findMarkerIndex(parts, marker);\n if (idx > 0) {\n boundaries.add(parts.slice(0, idx).join('/'));\n }\n }\n // Sort longest-first for greedy prefix matching\n return Array.from(boundaries).sort((a, b) => b.length - a.length);\n}\n\n/**\n * Derive the full resource name for a file belonging to a marker-based\n * resource type, using pre-computed boundaries to correctly group files\n * under nested directories.\n *\n * Falls back to standard derivation when no boundary matches.\n */\nexport function deriveMarkerFullName(path: string, resourceType: ResourceTypeId, boundaries: string[]): string {\n const def = getResourceTypeDef(resourceType);\n const categoryDir = def.dirName;\n const pluralKey = toPluralKey(resourceType);\n\n if (!categoryDir) {\n const fallback = path.replace(/\\\\/g, '/').split('/').pop() ?? 
'unnamed';\n return `${pluralKey}/${stripExtension(fallback)}`;\n }\n\n const pathUnder = getPathUnderCategory(path, categoryDir);\n if (pathUnder === null) {\n const fallback = path.replace(/\\\\/g, '/').split('/').pop() ?? 'unnamed';\n return `${pluralKey}/${stripExtension(fallback)}`;\n }\n\n // Try matching against known boundaries (longest-first)\n for (const boundary of boundaries) {\n if (pathUnder === boundary || pathUnder.startsWith(boundary + '/')) {\n return `${pluralKey}/${boundary}`;\n }\n }\n\n // Fallback: use standard marker namespace derivation\n const marker = getMarkerFilename(resourceType);\n const parts = pathUnder.split('/');\n return `${pluralKey}/${marker ? deriveMarkerNamespace(parts, marker) : parts[0] || 'unnamed'}`;\n}\n", "/**\n * Unified Resource Classifier\n *\n * Batch-classifies source keys and untracked paths into resource identities,\n * correctly handling marker-based types (e.g. nested skills with SKILL.md)\n * via pre-scan + boundary matching.\n *\n * Replaces per-key classification from source-key-classifier.ts which used\n * naive parts[1] logic and broke nested skill names.\n */\n\nimport { DIR_TO_TYPE, type ResourceTypeId } from './resource-registry.js';\nimport { getMarkerFilename, getResourceTypeDef } from './resource-registry.js';\nimport { buildMarkerBoundaries, deriveMarkerFullName, deriveResourceFullName, getPathUnderCategory } from './resource-namespace.js';\nimport { normalizeType, toPluralKey } from './resource-registry.js';\n\nexport interface GroupedUntrackedResource {\n resourceType: ResourceTypeId;\n resourceName: string;\n fullName: string;\n filePaths: string[];\n}\n\nexport interface ClassifiedResource {\n resourceType: ResourceTypeId;\n resourceName: string; // e.g. \"openpackage/skill-creator\"\n fullName: string; // e.g. 
\"skills/openpackage/skill-creator\"\n}\n\n/**\n * Detect the resource type from a source key's first path segment.\n * Returns 'mcp' for mcp.json/mcp.jsonc, 'other' for unrecognized dirs.\n */\nfunction detectTypeFromSourceKey(sourceKey: string): ResourceTypeId {\n const normalized = sourceKey.replace(/\\\\/g, '/').replace(/\\/$/, '');\n const parts = normalized.split('/');\n\n if (parts.length === 1 && (sourceKey === 'mcp.json' || sourceKey === 'mcp.jsonc')) {\n return 'mcp';\n }\n\n return DIR_TO_TYPE[parts[0]] ?? 'other';\n}\n\n/**\n * Extract resourceName from fullName by stripping the plural prefix.\n * e.g. \"skills/openpackage/skill-creator\" \u2192 \"openpackage/skill-creator\"\n * \"rules/basics/custom-rules\" \u2192 \"basics/custom-rules\"\n * \"mcps/configs\" \u2192 \"configs\"\n * \"other\" \u2192 \"other\"\n */\nfunction extractResourceName(fullName: string, resourceType: ResourceTypeId): string {\n const pluralKey = toPluralKey(resourceType);\n const prefix = pluralKey + '/';\n if (fullName.startsWith(prefix)) {\n return fullName.slice(prefix.length);\n }\n // For 'other' type, fullName is just \"other\"\n return fullName;\n}\n\n/**\n * Batch-classify source keys (tracked resources from workspace index).\n *\n * Groups keys by type, builds marker boundaries for marker-based types,\n * then derives the correct fullName for each key.\n *\n * @invariant Always returns an entry for every input key \u2014 no keys are\n * skipped. 
The `!` non-null assertions at call sites are therefore safe.\n */\nexport function classifySourceKeyBatch(sourceKeys: string[]): Map<string, ClassifiedResource> {\n const result = new Map<string, ClassifiedResource>();\n if (sourceKeys.length === 0) return result;\n\n // Step 1: Detect type per key and group by type\n const typeMap = new Map<ResourceTypeId, string[]>();\n const keyTypes = new Map<string, ResourceTypeId>();\n\n for (const key of sourceKeys) {\n const resourceType = detectTypeFromSourceKey(key);\n keyTypes.set(key, resourceType);\n if (!typeMap.has(resourceType)) {\n typeMap.set(resourceType, []);\n }\n typeMap.get(resourceType)!.push(key);\n }\n\n // Step 2: Build marker boundaries for types that have markers\n const boundaryCache = new Map<ResourceTypeId, string[]>();\n for (const [resourceType, keys] of typeMap) {\n if (getMarkerFilename(resourceType)) {\n boundaryCache.set(resourceType, buildMarkerBoundaries(keys, resourceType));\n }\n }\n\n // Step 3: Classify each key\n for (const key of sourceKeys) {\n const resourceType = keyTypes.get(key)!;\n const boundaries = boundaryCache.get(resourceType);\n const fullName = boundaries && boundaries.length > 0\n ? 
deriveMarkerFullName(key, resourceType, boundaries)\n : deriveResourceFullName(key, resourceType);\n const resourceName = extractResourceName(fullName, resourceType);\n\n result.set(key, { resourceType, resourceName, fullName });\n }\n\n return result;\n}\n\n/**\n * Batch-classify untracked paths (type already known from scanner).\n *\n * Similar to classifySourceKeyBatch but uses the provided resourceType\n * instead of detecting from path prefix, and operates on workspace paths\n * (which may have platform-specific prefixes like `.claude/skills/...`).\n */\nexport function classifyUntrackedPaths(\n files: Array<{ path: string; resourceType: ResourceTypeId }>\n): Map<string, ClassifiedResource> {\n const result = new Map<string, ClassifiedResource>();\n if (files.length === 0) return result;\n\n // Step 1: Group by type\n const typeMap = new Map<ResourceTypeId, string[]>();\n const pathTypes = new Map<string, ResourceTypeId>();\n\n for (const file of files) {\n pathTypes.set(file.path, file.resourceType);\n if (!typeMap.has(file.resourceType)) {\n typeMap.set(file.resourceType, []);\n }\n typeMap.get(file.resourceType)!.push(file.path);\n }\n\n // Step 2: Build marker boundaries for types that have markers\n const boundaryCache = new Map<ResourceTypeId, string[]>();\n for (const [resourceType, paths] of typeMap) {\n if (getMarkerFilename(resourceType)) {\n boundaryCache.set(resourceType, buildMarkerBoundaries(paths, resourceType));\n }\n }\n\n // Step 3: Classify each path\n for (const file of files) {\n const marker = getMarkerFilename(file.resourceType);\n\n // Marker enforcement: for marker-based types (e.g. skill with SKILL.md),\n // only classify files that are the marker itself or fall within a known\n // marker boundary. 
Orphan files (no marker nearby) are excluded.\n if (marker) {\n const def = getResourceTypeDef(file.resourceType);\n const categoryDir = def.dirName;\n if (categoryDir) {\n const pathUnder = getPathUnderCategory(file.path, categoryDir);\n if (pathUnder !== null) {\n const pathBasename = pathUnder.split('/').pop() ?? '';\n const isMarkerFile = pathBasename === marker;\n const boundaries = boundaryCache.get(file.resourceType) ?? [];\n const withinBoundary = boundaries.some(\n b => pathUnder === b || pathUnder.startsWith(b + '/')\n );\n if (!isMarkerFile && !withinBoundary) {\n continue; // orphan file \u2014 skip\n }\n }\n }\n }\n\n const boundaries = boundaryCache.get(file.resourceType);\n const fullName = boundaries && boundaries.length > 0\n ? deriveMarkerFullName(file.path, file.resourceType, boundaries)\n : deriveResourceFullName(file.path, file.resourceType);\n const resourceName = extractResourceName(fullName, file.resourceType);\n\n result.set(file.path, { resourceType: file.resourceType, resourceName, fullName });\n }\n\n return result;\n}\n\n/**\n * Classify untracked files and group them by resource key.\n *\n * Combines `classifyUntrackedPaths` with the iterate-and-group pattern that\n * both `resource-builder.ts` and `scope-data-collector.ts` duplicate. 
Orphan\n * files (skipped by marker enforcement in `classifyUntrackedPaths`) are\n * automatically excluded.\n *\n * @param files - Untracked files with workspace-relative paths and scanner categories\n * @returns Map from `\"resourceType::resourceName\"` to grouped resource info\n */\nexport function classifyAndGroupUntrackedFiles(\n files: Array<{ workspacePath: string; category: string }>\n): Map<string, GroupedUntrackedResource> {\n if (files.length === 0) return new Map();\n\n const classified = classifyUntrackedPaths(\n files.map(f => ({\n path: f.workspacePath,\n resourceType: normalizeType(f.category),\n }))\n );\n\n const grouped = new Map<string, GroupedUntrackedResource>();\n\n for (const file of files) {\n const cls = classified.get(file.workspacePath);\n if (!cls) continue; // orphan \u2014 skipped by marker enforcement\n\n const key = `${cls.resourceType}::${cls.resourceName}`;\n\n if (!grouped.has(key)) {\n grouped.set(key, {\n resourceType: cls.resourceType,\n resourceName: cls.resourceName,\n fullName: cls.fullName,\n filePaths: [],\n });\n }\n grouped.get(key)!.filePaths.push(file.workspacePath);\n }\n\n return grouped;\n}\n", "/**\n * Resource Builder\n *\n * Scans all packages in the workspace index and untracked files to build\n * a flat list of resolved resources for resource-level operations.\n *\n * Also provides a source-side builder that wraps the resource discoverer\n * to build resources from a package source directory.\n */\n\nimport { readWorkspaceIndex } from '../../utils/workspace-index-yml.js';\nimport { getTargetPath } from '../../utils/workspace-index-helpers.js';\nimport { scanUntrackedFiles } from '../list/untracked-files-scanner.js';\nimport { RESOURCE_TYPE_ORDER } from './resource-registry.js';\nimport { classifySourceKeyBatch, classifyAndGroupUntrackedFiles } from './resource-classifier.js';\nimport type { ResourceScope } from './scope-traversal.js';\nimport type { ResourceTypeId } from '../../types/resources.js';\nimport { 
logger } from '../../utils/logger.js';\n\nexport interface ResolvedResource {\n kind: 'tracked' | 'untracked';\n resourceName: string;\n resourceType: ResourceTypeId;\n /** Package name (only for tracked resources) */\n packageName?: string;\n /** Source keys in workspace index that belong to this resource (only for tracked) */\n sourceKeys: Set<string>;\n /** Target file paths in the workspace (for display) */\n targetFiles: string[];\n scope: ResourceScope;\n /** Absolute path to source file/directory (source-side builder only) */\n sourcePath?: string;\n /** Install kind - file or directory (source-side builder only) */\n installKind?: 'file' | 'directory' | 'plugin';\n}\n\nexport interface ResolvedPackage {\n packageName: string;\n version?: string;\n resourceCount: number;\n targetFiles: string[];\n scope: ResourceScope;\n}\n\nexport interface WorkspaceResources {\n resources: ResolvedResource[];\n packages: ResolvedPackage[];\n}\n\n/**\n * Build a flat list of all workspace resources (tracked and untracked)\n * for use in resource-level operations.\n */\nexport async function buildWorkspaceResources(\n targetDir: string,\n scope: ResourceScope\n): Promise<WorkspaceResources> {\n const { index } = await readWorkspaceIndex(targetDir);\n const packages = index.packages || {};\n\n const resources: ResolvedResource[] = [];\n const resolvedPackages: ResolvedPackage[] = [];\n\n // Process tracked packages (including workspace package \u2014 its installed\n // resources can be uninstalled; manifest removal is skipped for root package)\n for (const [pkgName, pkgEntry] of Object.entries(packages)) {\n const filesMapping = pkgEntry.files || {};\n const resourceMap = new Map<string, { sourceKeys: Set<string>; targetFiles: string[] }>();\n const allTargetFiles: string[] = [];\n\n const classified = classifySourceKeyBatch(Object.keys(filesMapping));\n\n for (const [sourceKey, mappings] of Object.entries(filesMapping)) {\n const cls = classified.get(sourceKey)!;\n const { 
resourceType, resourceName } = cls;\n\n const key = resourceType === 'other'\n ? 'other::other'\n : `${resourceType}::${resourceName}`;\n\n if (!resourceMap.has(key)) {\n resourceMap.set(key, { sourceKeys: new Set(), targetFiles: [] });\n }\n const entry = resourceMap.get(key)!;\n entry.sourceKeys.add(sourceKey);\n\n for (const mapping of mappings) {\n const target = getTargetPath(mapping);\n entry.targetFiles.push(target);\n allTargetFiles.push(target);\n }\n }\n\n // Create ResolvedResource entries for this package\n for (const [key, entry] of resourceMap) {\n const [resourceType, resourceName] = key.split('::') as [ResourceTypeId, string];\n resources.push({\n kind: 'tracked',\n resourceName,\n resourceType,\n packageName: pkgName,\n sourceKeys: entry.sourceKeys,\n targetFiles: entry.targetFiles,\n scope,\n });\n }\n\n // Build ResolvedPackage\n resolvedPackages.push({\n packageName: pkgName,\n version: pkgEntry.version,\n resourceCount: resourceMap.size,\n targetFiles: allTargetFiles,\n scope,\n });\n }\n\n // Process untracked files (isolated so scan failure doesn't discard tracked results)\n try {\n const untrackedResult = await scanUntrackedFiles(targetDir);\n const grouped = classifyAndGroupUntrackedFiles(untrackedResult.files);\n\n for (const [, group] of grouped) {\n resources.push({\n kind: 'untracked',\n resourceName: group.resourceName,\n resourceType: group.resourceType,\n sourceKeys: new Set(),\n targetFiles: group.filePaths,\n scope,\n });\n }\n } catch (error) {\n logger.warn('Failed to scan untracked files, returning tracked results only', {\n reason: error instanceof Error ? error.message : String(error),\n });\n }\n\n // Sort resources by type order then name\n const typeOrderMap = new Map(RESOURCE_TYPE_ORDER.map((t, i) => [t, i]));\n resources.sort((a, b) => {\n const orderA = typeOrderMap.get(a.resourceType as any) ?? Infinity;\n const orderB = typeOrderMap.get(b.resourceType as any) ?? 
Infinity;\n if (orderA !== orderB) return orderA - orderB;\n return a.resourceName.localeCompare(b.resourceName);\n });\n\n // Sort packages by name\n resolvedPackages.sort((a, b) => a.packageName.localeCompare(b.packageName));\n\n return { resources, packages: resolvedPackages };\n}\n\n/**\n * Build a flat list of resources from a package source directory.\n * Wraps the resource discoverer and normalizes output into ResolvedResource[].\n * Used by commands that operate on package sources (add --copy, remove).\n *\n * @param sourceDir - Absolute path to the package source directory\n * @param scope - Resource scope for the source\n */\nexport async function buildSourceResources(\n sourceDir: string,\n scope: ResourceScope\n): Promise<WorkspaceResources> {\n const { discoverResources } = await import('../install/resource-discoverer.js');\n const discovery = await discoverResources(sourceDir, sourceDir);\n\n const resources: ResolvedResource[] = [];\n\n for (const discovered of discovery.all) {\n resources.push({\n kind: 'tracked',\n resourceName: discovered.displayName,\n resourceType: discovered.resourceType,\n sourceKeys: new Set([discovered.resourcePath]),\n targetFiles: [discovered.resourcePath],\n scope,\n sourcePath: discovered.filePath,\n installKind: discovered.installKind,\n });\n }\n\n // Sort by type order then name (same as workspace builder)\n const typeOrderMap = new Map(RESOURCE_TYPE_ORDER.map((t, i) => [t, i]));\n resources.sort((a, b) => {\n const orderA = typeOrderMap.get(a.resourceType as any) ?? Infinity;\n const orderB = typeOrderMap.get(b.resourceType as any) ?? Infinity;\n if (orderA !== orderB) return orderA - orderB;\n return a.resourceName.localeCompare(b.resourceName);\n });\n\n return { resources, packages: [] };\n}\n", "/**\n * Resource Resolver\n *\n * Resolves a user-provided name (e.g., `custom-rules`) to matching\n * resources and/or packages in the workspace. 
Used by uninstall, save,\n * and which commands for direct name resolution.\n */\n\nimport { buildWorkspaceResources, buildSourceResources, type ResolvedResource, type ResolvedPackage, type WorkspaceResources } from './resource-builder.js';\nimport type { ResourceScope } from './scope-traversal.js';\nimport { formatScopeTag } from '../../utils/formatters.js';\nimport { logger } from '../../utils/logger.js';\n\nexport interface ResolutionCandidate {\n kind: 'resource' | 'package';\n resource?: ResolvedResource;\n package?: ResolvedPackage;\n}\n\nexport interface ResolutionResult {\n candidates: ResolutionCandidate[];\n}\n\n// ---------------------------------------------------------------------------\n// Candidate accessors\n// ---------------------------------------------------------------------------\n\n/** Extract the scope from a resolution candidate. */\nexport function getCandidateScope(candidate: ResolutionCandidate): ResourceScope | undefined {\n return candidate.kind === 'resource' ? candidate.resource?.scope : candidate.package?.scope;\n}\n\n// ---------------------------------------------------------------------------\n// Shared candidate formatters (used by uninstall and save disambiguation)\n// ---------------------------------------------------------------------------\n\nexport function formatCandidateTitle(candidate: ResolutionCandidate): string {\n if (candidate.kind === 'package') {\n const pkg = candidate.package!;\n const version = pkg.version && pkg.version !== '0.0.0' ? ` (v${pkg.version})` : '';\n const scopeTag = formatScopeTag(pkg.scope);\n return `${pkg.packageName}${version} (package, ${pkg.resourceCount} resources)${scopeTag}`;\n }\n const r = candidate.resource!;\n const fromPkg = r.packageName ? 
`, from ${r.packageName}` : '';\n const scopeTag = formatScopeTag(r.scope);\n return `${r.resourceName} (${r.resourceType}${fromPkg})${scopeTag}`;\n}\n\nexport function formatCandidateDescription(candidate: ResolutionCandidate): string {\n const files = candidate.kind === 'package'\n ? candidate.package!.targetFiles\n : candidate.resource!.targetFiles;\n if (files.length === 0) return 'no files';\n const displayFiles = files.slice(0, 5);\n const remaining = files.length - displayFiles.length;\n let desc = displayFiles.join('\\n');\n if (remaining > 0) {\n desc += `\\n+${remaining} more`;\n }\n return desc;\n}\n\n// ---------------------------------------------------------------------------\n// Shared matching logic\n// ---------------------------------------------------------------------------\n\n/**\n * Match a name against resources and packages in a WorkspaceResources structure.\n *\n * Resources are matched case-insensitively by `resourceName`.\n * Packages are matched exactly (case-sensitive) by `packageName`.\n *\n * Pure function \u2014 no I/O.\n */\nexport function matchCandidates(workspace: WorkspaceResources, name: string): ResolutionCandidate[] {\n const candidates: ResolutionCandidate[] = [];\n const nameLower = name.toLowerCase();\n\n for (const resource of workspace.resources) {\n if (resource.resourceName.toLowerCase() === nameLower) {\n candidates.push({ kind: 'resource', resource });\n }\n }\n\n for (const pkg of workspace.packages) {\n if (pkg.packageName === name) {\n candidates.push({ kind: 'package', package: pkg });\n }\n }\n\n return candidates;\n}\n\n// ---------------------------------------------------------------------------\n// Workspace-side resolution\n// ---------------------------------------------------------------------------\n\n/**\n * Resolve a name to matching resources and packages within a single scope.\n *\n * Resources are matched case-insensitively by `resourceName`.\n * Packages are matched exactly (case-sensitive) by 
`packageName`.\n *\n * @param name - User-provided name to resolve\n * @param targetDir - Workspace directory to search\n * @param scope - Resource scope ('project' or 'global')\n * @returns Resolution result with matching candidates\n */\nexport async function resolveByName(\n name: string,\n targetDir: string,\n scope: ResourceScope\n): Promise<ResolutionResult> {\n const workspace = await buildWorkspaceResources(targetDir, scope);\n return { candidates: matchCandidates(workspace, name) };\n}\n\n// ---------------------------------------------------------------------------\n// Source-side resolution\n// ---------------------------------------------------------------------------\n\n/**\n * Resolve a name to matching resources in a package source directory.\n *\n * Source-side counterpart to `resolveByName` \u2014 scans the package source\n * (e.g., `~/.openpackage/packages/foo/`) instead of the deployed workspace.\n *\n * @param name - User-provided resource name to resolve\n * @param sourceDir - Absolute path to the package source directory\n * @param scope - Resource scope\n * @returns Resolution result with matching candidates\n */\nexport async function resolveFromSource(\n name: string,\n sourceDir: string,\n scope: ResourceScope\n): Promise<ResolutionResult> {\n const workspace = await buildSourceResources(sourceDir, scope);\n return { candidates: matchCandidates(workspace, name) };\n}\n\n/**\n * Resolve a name across both project and global scopes.\n * \n * If the project directory has no .openpackage workspace, only global\n * results are returned (no error is thrown).\n * \n * @param name - User-provided name to resolve\n * @param projectDir - Project workspace directory\n * @param globalDir - Global workspace directory\n * @returns Combined resolution result from both scopes\n */\nexport async function resolveAcrossScopes(\n name: string,\n projectDir: string,\n globalDir: string\n): Promise<ResolutionResult> {\n let projectCandidates: 
ResolutionCandidate[] = [];\n\n try {\n const projectResult = await resolveByName(name, projectDir, 'project');\n projectCandidates = projectResult.candidates;\n } catch (error) {\n logger.debug('Project scope resolution skipped', {\n projectDir,\n reason: error instanceof Error ? error.message : String(error),\n });\n }\n\n const globalResult = await resolveByName(name, globalDir, 'global');\n\n return {\n candidates: [...projectCandidates, ...globalResult.candidates],\n };\n}\n", "import { createExecutionContext } from '../execution-context.js';\nimport type { ExecutionContext } from '../../types/execution-context.js';\nimport { logger } from '../../utils/logger.js';\n\n/**\n * Resource scope for workspace operations\n */\nexport type ResourceScope = 'project' | 'global';\n\nexport interface ScopeEntry {\n scope: ResourceScope;\n context: ExecutionContext;\n}\n\nexport interface TraverseScopesOptions {\n /** Program-level options (e.g., from command.parent?.opts()) */\n programOpts?: Record<string, any>;\n /** If true, skip project scope entirely */\n globalOnly?: boolean;\n /** If true, skip global scope entirely */\n projectOnly?: boolean;\n /** Optional callback invoked when a scope fails. If not provided, errors are logged at debug level. 
*/\n onScopeError?: (scope: ResourceScope, error: unknown) => void;\n}\n\n/**\n * Traverse applicable scopes (project and/or global) and run a callback for each.\n * \n * Project scope failures are silently skipped (common when no .openpackage workspace exists).\n * Global scope failures are also caught and logged.\n * \n * @param options - Scope traversal options\n * @param callback - Async function to run for each scope\n * @returns Array of results from successful scope callbacks\n */\nexport async function traverseScopes<T>(\n options: TraverseScopesOptions,\n callback: (entry: ScopeEntry) => Promise<T>\n): Promise<Array<{ scope: ResourceScope; result: T }>> {\n const results: Array<{ scope: ResourceScope; result: T }> = [];\n const cwd = options.programOpts?.cwd;\n\n // Project scope\n if (!options.globalOnly) {\n try {\n const context = await createExecutionContext({ global: false, cwd });\n const result = await callback({ scope: 'project', context });\n results.push({ scope: 'project', result });\n } catch (error) {\n if (options.onScopeError) {\n options.onScopeError('project', error);\n } else {\n logger.debug('Project scope traversal skipped', {\n reason: error instanceof Error ? error.message : String(error),\n });\n }\n }\n }\n\n // Global scope\n if (!options.projectOnly) {\n try {\n const context = await createExecutionContext({ global: true, cwd });\n const result = await callback({ scope: 'global', context });\n results.push({ scope: 'global', result });\n } catch (error) {\n if (options.onScopeError) {\n options.onScopeError('global', error);\n } else {\n logger.debug('Global scope traversal skipped', {\n reason: error instanceof Error ? 
error.message : String(error),\n });\n }\n }\n }\n\n return results;\n}\n\n/**\n * Traverse scopes and flatten/merge results into a single array.\n * Convenience wrapper when you just need all items across scopes.\n */\nexport async function traverseScopesFlat<T>(\n options: TraverseScopesOptions,\n callback: (entry: ScopeEntry) => Promise<T[]>\n): Promise<T[]> {\n const scopeResults = await traverseScopes(options, callback);\n return scopeResults.flatMap(sr => sr.result);\n}\n", "/**\n * Disambiguation Prompt\n *\n * Reusable \"0/1/N\" resolution pattern for resource and package name disambiguation.\n * - 0 candidates \u2192 throws ValidationError\n * - 1 candidate \u2192 returns it directly (no prompt)\n * - N candidates \u2192 shows multiselect prompt for user to choose\n */\n\nimport type { OutputPort } from '../ports/output.js';\nimport type { PromptPort } from '../ports/prompt.js';\nimport { resolveOutput, resolvePrompt } from '../ports/resolve.js';\nimport { ValidationError } from '../../utils/errors.js';\n\nexport interface DisambiguationChoice<T> {\n title: string;\n description?: string;\n value: T;\n}\n\nexport interface DisambiguationOptions {\n /** Message shown when no candidates found. Use ${name} as placeholder for the searched name. */\n notFoundMessage?: string;\n /** Header message shown above choices when multiple candidates found. Use ${name} placeholder. 
*/\n ambiguousHeader?: string;\n /** Prompt message for the multiselect */\n promptMessage?: string;\n /** Whether to allow multiple selections (default: true) */\n multi?: boolean;\n}\n\n/**\n * Disambiguate among candidates using the 0/1/N pattern.\n *\n * @param name - The name that was searched for (used in messages)\n * @param candidates - Array of candidate items\n * @param formatChoice - Function to format each candidate as a prompt choice\n * @param options - Configuration options\n * @param output - Optional OutputPort\n * @param prompt - Optional PromptPort\n * @returns Array of selected candidates (single-element for 1 match or multi=false)\n */\nexport async function disambiguate<T>(\n name: string,\n candidates: T[],\n formatChoice: (candidate: T, index: number) => DisambiguationChoice<T>,\n options: DisambiguationOptions = {},\n output?: OutputPort,\n prompt?: PromptPort\n): Promise<T[]> {\n const {\n notFoundMessage = `\"${name}\" not found.\\nRun \\`opkg ls\\` to see installed resources.`,\n ambiguousHeader = `\\n\"${name}\" matches multiple items:\\n`,\n promptMessage = 'Select which to act on:',\n multi = true,\n } = options;\n\n const out = output ?? resolveOutput();\n const prm = prompt ?? 
resolvePrompt();\n\n // 0 candidates \u2192 error\n if (candidates.length === 0) {\n throw new ValidationError(\n notFoundMessage.replace(/\\$\\{name\\}/g, name)\n );\n }\n\n // 1 candidate \u2192 auto-select\n if (candidates.length === 1) {\n return [candidates[0]];\n }\n\n // N candidates \u2192 prompt\n const choices = candidates.map((c, i) => {\n const choice = formatChoice(c, i);\n return {\n title: choice.title,\n description: choice.description,\n value: i,\n };\n });\n\n out.info(ambiguousHeader.replace(/\\$\\{name\\}/g, name).trim());\n\n if (multi) {\n const selectedIndices = await prm.multiselect<number>(\n promptMessage,\n choices\n );\n\n if (!selectedIndices || selectedIndices.length === 0) {\n return [];\n }\n return selectedIndices.map(i => candidates[i]);\n } else {\n // Single select mode\n const selectedIndex = await prm.select<number>(\n promptMessage,\n choices\n );\n\n if (selectedIndex === null || selectedIndex === undefined) {\n return [];\n }\n return [candidates[selectedIndex]];\n }\n}\n", "/**\n * Resource Spec Classification & Resolution\n *\n * Centralized module for classifying and resolving user-provided resource specs\n * (e.g., `agents/ui-designer`, `./file.txt`, `essentials`).\n *\n * Used by add, remove, save, sync, and uninstall commands.\n */\n\nimport type { ExecutionContext } from '../../types/execution-context.js';\nimport type { OutputPort } from '../ports/output.js';\nimport type { PromptPort } from '../ports/prompt.js';\nimport { parseResourceQuery, type ResourceQuery } from './resource-query.js';\nimport { resolveByName, formatCandidateTitle, formatCandidateDescription, getCandidateScope, type ResolutionCandidate } from './resource-resolver.js';\nimport { traverseScopesFlat, type TraverseScopesOptions } from './scope-traversal.js';\nimport { disambiguate, type DisambiguationOptions } from './disambiguation-prompt.js';\nimport { resolveOutput, resolvePrompt } from '../ports/resolve.js';\nimport { readWorkspaceIndex } from 
'../../utils/workspace-index-yml.js';\nimport { resolveDeclaredPath } from '../../utils/path-resolution.js';\n\n// ---------------------------------------------------------------------------\n// Classification\n// ---------------------------------------------------------------------------\n\nexport type ResourceSpecClassification =\n | { kind: 'explicit-path' }\n | { kind: 'resource-ref'; query: ResourceQuery }\n | { kind: 'other' };\n\n/**\n * Classify a user-provided resource spec string.\n *\n * Synchronous, deterministic, no I/O.\n *\n * Rules (in priority order):\n * 1. `./`, `../`, `/`, `~/` prefix (or `.` / `~` alone) \u2192 explicit-path\n * 2. Trailing `/` \u2192 other (directory intent)\n * 3. Known type prefix via `parseResourceQuery()` with non-empty name \u2192 resource-ref\n * 4. Everything else \u2192 other\n */\nexport function classifyResourceSpec(input: string): ResourceSpecClassification {\n // Rule 1: explicit path prefixes\n if (\n input === '.' ||\n input === '~' ||\n input.startsWith('./') ||\n input.startsWith('../') ||\n input.startsWith('/') ||\n input.startsWith('~/')\n ) {\n return { kind: 'explicit-path' };\n }\n\n // Rule 2: trailing slash \u2192 directory intent\n if (input.endsWith('/')) {\n return { kind: 'other' };\n }\n\n // Rule 3: known type prefix\n const query = parseResourceQuery(input);\n if (query.typeFilter && query.name) {\n return { kind: 'resource-ref', query };\n }\n\n // Rule 4: everything else\n return { kind: 'other' };\n}\n\n// ---------------------------------------------------------------------------\n// Resolution\n// ---------------------------------------------------------------------------\n\nexport interface ResolvedTarget {\n candidate: ResolutionCandidate;\n targetDir: string;\n /** Absolute path to the package source directory (tilde-expanded from workspace index). 
*/\n packageSourcePath?: string;\n}\n\nexport interface ResolveResourceSpecOptions extends DisambiguationOptions {\n /** Optional type filter to apply (usually from classifyResourceSpec). Overrides query.typeFilter if provided. */\n typeFilter?: string;\n /** If set, prefer candidates from this scope. When preferred-scope candidates exist, others are dropped before disambiguation. */\n scopePreference?: 'project' | 'global';\n}\n\n/**\n * Resolve a user-provided resource spec to concrete candidates.\n *\n * Composes: parseResourceQuery \u2192 traverseScopesFlat(resolveByName) \u2192 filter \u2192 disambiguate.\n *\n * @param input - User-provided resource spec (e.g., `agents/foo`, `foo`)\n * @param traverseOpts - Scope traversal options\n * @param options - Disambiguation options + optional type filter\n * @param ctx - Optional execution context for output/prompt ports\n * @returns Selected resolved targets\n */\nexport async function resolveResourceSpec(\n input: string,\n traverseOpts: TraverseScopesOptions,\n options?: ResolveResourceSpecOptions,\n ctx?: ExecutionContext,\n): Promise<ResolvedTarget[]> {\n const query = parseResourceQuery(input);\n\n // Resolve candidates across scopes\n const paired: ResolvedTarget[] = [];\n\n await traverseScopesFlat<null>(\n traverseOpts,\n async ({ scope, context }) => {\n const result = await resolveByName(query.name, context.targetDir, scope);\n if (result.candidates.length > 0) {\n // Read workspace index once per scope to enrich with package source paths\n const { index } = await readWorkspaceIndex(context.targetDir);\n for (const c of result.candidates) {\n let packageSourcePath: string | undefined;\n const pkgName = c.kind === 'resource' ? 
c.resource?.packageName : c.package?.packageName;\n if (pkgName) {\n const pkgEntry = index.packages[pkgName];\n if (pkgEntry?.path) {\n packageSourcePath = resolveDeclaredPath(pkgEntry.path, context.targetDir).absolute;\n }\n }\n paired.push({ candidate: c, targetDir: context.targetDir, packageSourcePath });\n }\n }\n return [null];\n },\n );\n\n // If type-qualified, filter by resource type\n const typeFilter = options?.typeFilter ?? query.typeFilter;\n let filtered = paired;\n if (typeFilter) {\n filtered = filtered.filter(\n p => p.candidate.kind === 'resource' && p.candidate.resource?.resourceType === typeFilter,\n );\n }\n\n // Bare-name preference: when input has no type qualifier,\n // prefer package candidates over resource candidates.\n // Resources require type qualification (e.g., skills/name).\n if (!typeFilter && filtered.length > 1) {\n const packageCandidates = filtered.filter(p => p.candidate.kind === 'package');\n if (packageCandidates.length > 0) {\n filtered = packageCandidates;\n }\n }\n\n // Scope preference: if preferred scope has candidates, drop others\n if (options?.scopePreference && filtered.length > 1) {\n const preferred = filtered.filter(p => getCandidateScope(p.candidate) === options.scopePreference);\n if (preferred.length > 0) {\n filtered = preferred;\n }\n }\n\n // Disambiguate\n const out = resolveOutput(ctx);\n const prm = resolvePrompt(ctx);\n\n const selected = await disambiguate(\n input,\n filtered,\n (p) => ({\n title: formatCandidateTitle(p.candidate),\n description: formatCandidateDescription(p.candidate),\n value: p,\n }),\n {\n notFoundMessage: options?.notFoundMessage ?? `\"${input}\" not found as a package.\\nHint: To target a resource, use its qualified name (e.g., skills/${input}).\\nRun \\`opkg ls\\` to see installed resources.`,\n ambiguousHeader: options?.ambiguousHeader,\n promptMessage: options?.promptMessage ?? 
'Select which to act on:',\n multi: options?.multi,\n },\n out,\n prm,\n );\n\n return selected;\n}\n", "/**\n * Helper functions for working with workspace index file mappings\n */\n\nimport type { WorkspaceIndexFileMapping, WorkspaceIndexPackage } from '../types/workspace-index.js';\nimport { arePackageNamesEquivalent, normalizePackageNameForLookup } from './package-name.js';\nimport { classifyResourceSpec } from '../core/resources/resource-spec.js';\nimport { isQualifiedName } from './qualified-name.js';\n\n/**\n * Extract target path from a mapping (handles both string and object forms)\n */\nexport function getTargetPath(mapping: string | WorkspaceIndexFileMapping): string {\n return typeof mapping === 'string' ? mapping : mapping.target;\n}\n\n/**\n * Check if a mapping is complex (has key tracking)\n */\nexport function isComplexMapping(mapping: string | WorkspaceIndexFileMapping): mapping is WorkspaceIndexFileMapping {\n return typeof mapping !== 'string';\n}\n\n/**\n * Check if a mapping represents a merged file (multiple packages contributing to one target)\n */\nexport function isMergedMapping(\n mapping: string | WorkspaceIndexFileMapping\n): mapping is WorkspaceIndexFileMapping {\n return (\n typeof mapping !== 'string' &&\n !!mapping.merge &&\n Array.isArray(mapping.keys) &&\n mapping.keys.length > 0\n );\n}\n\n/**\n * Deduplicate target mappings by target path, preferring object mappings over plain strings.\n */\nexport function deduplicateTargets(\n existing: (string | WorkspaceIndexFileMapping)[],\n incoming: (string | WorkspaceIndexFileMapping)[]\n): (string | WorkspaceIndexFileMapping)[] {\n const byTarget = new Map<string, string | WorkspaceIndexFileMapping>();\n for (const m of existing) {\n byTarget.set(getTargetPath(m), m);\n }\n for (const m of incoming) {\n const tp = getTargetPath(m);\n const prior = byTarget.get(tp);\n if (!prior || (typeof prior === 'string' && typeof m !== 'string')) {\n byTarget.set(tp, m);\n }\n }\n return 
Array.from(byTarget.values());\n}\n\n/**\n * Extract all target paths from file mappings\n */\nexport function extractAllTargetPaths(\n files: Record<string, (string | WorkspaceIndexFileMapping)[]>\n): string[] {\n const paths: string[] = [];\n\n for (const mappings of Object.values(files)) {\n for (const mapping of mappings) {\n paths.push(getTargetPath(mapping));\n }\n }\n\n return paths;\n}\n\n/**\n * Find a package in the workspace index using multi-strategy fallback:\n * 1. Exact key match\n * 2. Case-insensitive equivalence\n * 3. Old\u2192new format normalization (@scope/repo \u2192 gh@scope/repo)\n * 4. Resource name match (e.g. \"skills/skill-creator\" matches package with source keys starting with \"skills/skill-creator/\")\n */\nexport function findPackageInIndex(\n input: string,\n packages: Record<string, WorkspaceIndexPackage>,\n): { key: string; entry: WorkspaceIndexPackage } | null {\n // 1. Exact key match\n if (packages[input]) {\n return { key: input, entry: packages[input] };\n }\n\n // 2. Case-insensitive equivalence\n for (const key of Object.keys(packages)) {\n if (arePackageNamesEquivalent(key, input)) {\n return { key, entry: packages[key] };\n }\n }\n\n // 2.5. Unambiguous child lookup: if input is NOT qualified, check for any qualified key\n // ending with /<input> \u2014 return if exactly one match (unambiguous).\n if (!isQualifiedName(input)) {\n const suffix = '/' + input.toLowerCase();\n const qualifiedMatches: { key: string; entry: WorkspaceIndexPackage }[] = [];\n for (const [key, entry] of Object.entries(packages)) {\n if (isQualifiedName(key) && key.toLowerCase().endsWith(suffix)) {\n qualifiedMatches.push({ key, entry });\n }\n }\n if (qualifiedMatches.length === 1) {\n return qualifiedMatches[0];\n }\n // If multiple matches, fall through (ambiguous \u2014 user must qualify)\n }\n\n // 3. 
Old\u2192new format normalization\n const normalized = normalizePackageNameForLookup(input);\n if (normalized !== input.toLowerCase()) {\n if (packages[normalized]) {\n return { key: normalized, entry: packages[normalized] };\n }\n for (const key of Object.keys(packages)) {\n if (arePackageNamesEquivalent(key, normalized)) {\n return { key, entry: packages[key] };\n }\n }\n }\n\n // 4. Resource name match \u2014 only if input looks like a resource ref (e.g. \"skills/skill-creator\")\n const spec = classifyResourceSpec(input);\n if (spec.kind === 'resource-ref') {\n const inputWithSlash = input + '/';\n for (const [key, entry] of Object.entries(packages)) {\n const files = entry.files;\n if (!files) continue;\n for (const sourceKey of Object.keys(files)) {\n if (sourceKey.startsWith(inputWithSlash) || sourceKey === input) {\n return { key, entry };\n }\n }\n }\n }\n\n return null;\n}\n", "/**\n * Platform Filtering Helpers Module\n * \n * Utilities for filtering source files based on platform specificity.\n */\n\nimport { basename } from 'path';\nimport type { Flow } from '../../../../types/flows.js';\nimport type { Platform } from '../../../platforms.js';\nimport {\n buildOverrideMap,\n shouldSkipUniversalFile,\n isPlatformSpecificFileForTarget\n} from '../../../flows/platform-suffix-handler.js';\nimport { isPlatformId } from '../../../platforms.js';\n\n/**\n * Filter flow sources by platform, removing files not applicable to target platform\n * \n * @param flowSources - Map of flows to source file paths\n * @param platform - Target platform\n * @returns Filtered map with only applicable sources\n */\nexport function filterSourcesByPlatform(\n flowSources: Map<Flow, string[]>,\n platform: Platform\n): Map<Flow, string[]> {\n const filtered = new Map<Flow, string[]>();\n \n // Build override map once for all sources\n const allSources: string[] = [];\n for (const sources of flowSources.values()) {\n allSources.push(...sources);\n }\n const overrideMap = 
buildOverrideMap(allSources);\n \n for (const [flow, sources] of flowSources) {\n const filteredSourcesForFlow: string[] = [];\n \n for (const sourceRel of sources) {\n // Skip platform-specific files not for this platform\n if (!isPlatformSpecificFileForTarget(sourceRel, platform) && \n sourceRel.includes('.') && \n sourceRel.split('.').length >= 3) {\n const parts = basename(sourceRel).split('.');\n const possiblePlatform = parts[parts.length - 2];\n if (possiblePlatform !== platform && isPlatformId(possiblePlatform)) {\n continue;\n }\n }\n \n // Skip universal files with platform overrides\n if (shouldSkipUniversalFile(sourceRel, platform, allSources, overrideMap)) {\n continue;\n }\n \n filteredSourcesForFlow.push(sourceRel);\n }\n \n if (filteredSourcesForFlow.length > 0) {\n filtered.set(flow, filteredSourcesForFlow);\n }\n }\n \n return filtered;\n}\n\n/**\n * Re-export utilities for convenience\n */\nexport { buildOverrideMap, shouldSkipUniversalFile, isPlatformSpecificFileForTarget };\n", "/**\n * Shared Import Pipeline\n *\n * Extracts the flow processing core that both install and sync-pull share.\n * Install uses the composable stages (to inject conflict resolution between\n * discovery and execution). 
Sync pull calls the composed function directly.\n *\n * Design: \"Sync owns status classification and selectivity.\n * Install's file processing pipeline owns the how.\"\n */\n\nimport type { Flow, FlowContext } from '../../types/flows.js';\nimport type { Platform } from '../platforms.js';\nimport type { PackageConversionContext } from '../../types/conversion-context.js';\nimport type { ExecutionResult } from './flow-execution-coordinator.js';\nimport { getPlatformDefinition, deriveRootDirFromFlows, platformUsesFlows } from '../platforms.js';\nimport { getApplicableFlows } from '../install/strategies/helpers/flow-helpers.js';\nimport { discoverFlowSources } from './flow-source-discovery.js';\nimport { executeFlowsForSources } from './flow-execution-coordinator.js';\nimport { filterSourcesByPlatform } from '../install/strategies/helpers/platform-filtering.js';\nimport { relative } from 'path';\nimport { deriveResourceLeafFromPackageName } from '../../utils/plugin-naming.js';\nimport { matchPackagePath } from '../../utils/match-path.js';\n\n// ---------------------------------------------------------------------------\n// Types\n// ---------------------------------------------------------------------------\n\nexport interface ImportPipelineContext {\n packageName: string;\n packageRoot: string; // absolute path to package source\n workspaceRoot: string; // cwd\n platform: Platform;\n packageVersion: string;\n priority: number;\n dryRun: boolean;\n conversionContext: PackageConversionContext;\n matchedPattern?: string; // resource filtering (install only)\n}\n\nexport interface ImportPipelineOptions {\n /** Sync's selectivity \u2014 only process these source keys. undefined = all. */\n sourceKeyFilter?: Set<string>;\n}\n\nexport interface ImportPipelineResult {\n executionResult: ExecutionResult;\n /** Post-filter, pre-execution flow sources (for install's conflict resolver). 
*/\n filteredFlowSources: Map<Flow, string[]>;\n}\n\n// ---------------------------------------------------------------------------\n// Stage 1: Build FlowContext\n// ---------------------------------------------------------------------------\n\n/**\n * Build a FlowContext with standard variables from an ImportPipelineContext.\n *\n * This is the standalone equivalent of BaseStrategy.buildFlowContext().\n */\nexport function buildImportFlowContext(\n ctx: ImportPipelineContext,\n direction: 'install' | 'save' = 'install',\n): FlowContext {\n const platformDef = getPlatformDefinition(ctx.platform, ctx.workspaceRoot);\n const resourceLeaf = deriveResourceLeafFromPackageName(ctx.packageName);\n\n // Use conversion context as single source of truth for original format\n const originalSource = ctx.conversionContext.originalFormat.platform || 'openpackage';\n\n return {\n workspaceRoot: ctx.workspaceRoot,\n packageRoot: ctx.packageRoot,\n platform: ctx.platform,\n packageName: ctx.packageName,\n direction,\n variables: {\n name: ctx.packageName,\n version: ctx.packageVersion,\n priority: ctx.priority,\n rootFile: platformDef.rootFile,\n rootDir: deriveRootDirFromFlows(platformDef),\n platform: ctx.platform,\n targetPlatform: ctx.platform,\n source: originalSource,\n sourcePlatform: originalSource,\n targetRoot: ctx.workspaceRoot,\n resourceLeaf,\n },\n dryRun: ctx.dryRun,\n };\n}\n\n// ---------------------------------------------------------------------------\n// Stage 2: Discover and filter sources\n// ---------------------------------------------------------------------------\n\n/**\n * Discover flow sources, apply resource filtering, platform filtering,\n * and optional sourceKey scoping (for sync pull).\n *\n * Composes: discoverFlowSources -> applyResourceFiltering\n * -> filterSourcesByPlatform -> filterToScope\n */\nexport async function discoverAndFilterSources(\n flows: Flow[],\n ctx: ImportPipelineContext,\n flowCtx: FlowContext,\n opts?: 
ImportPipelineOptions,\n): Promise<Map<Flow, string[]>> {\n // 1. Discover sources\n const flowSources = await discoverFlowSources(flows, ctx.packageRoot, flowCtx);\n\n // 2. Apply resource filtering (matchedPattern-based)\n const resourceFiltered = applyResourceFiltering(\n flowSources,\n ctx.matchedPattern,\n ctx.packageRoot,\n );\n\n // 3. Filter by platform\n const platformFiltered = filterSourcesByPlatform(resourceFiltered, ctx.platform);\n\n // 4. Apply sourceKey scope filter (sync pull selectivity)\n if (opts?.sourceKeyFilter) {\n for (const [flow, sources] of platformFiltered) {\n const kept = sources.filter(s => opts.sourceKeyFilter!.has(s));\n if (kept.length > 0) {\n platformFiltered.set(flow, kept);\n } else {\n platformFiltered.delete(flow);\n }\n }\n }\n\n return platformFiltered;\n}\n\n// ---------------------------------------------------------------------------\n// Stage 3: Execute flows\n// ---------------------------------------------------------------------------\n\n/**\n * Execute flows on resolved sources.\n * Thin wrapper around executeFlowsForSources.\n */\nexport async function executeImportFlows(\n flowSources: Map<Flow, string[]>,\n flowCtx: FlowContext,\n): Promise<ExecutionResult> {\n return executeFlowsForSources(flowSources, flowCtx);\n}\n\n// ---------------------------------------------------------------------------\n// Composed: Full pipeline (for sync pull)\n// ---------------------------------------------------------------------------\n\n/**\n * Full import pipeline for a single platform.\n *\n * Equivalent to what FlowBasedInstallStrategy.install() does minus conflict\n * resolution. 
Sync pull calls this; install uses the stages individually.\n */\nexport async function processFlowsForPackage(\n ctx: ImportPipelineContext,\n opts?: ImportPipelineOptions,\n): Promise<ImportPipelineResult> {\n // Check if platform uses flows\n if (!platformUsesFlows(ctx.platform, ctx.workspaceRoot)) {\n return {\n executionResult: emptyExecutionResult(),\n filteredFlowSources: new Map(),\n };\n }\n\n // Get applicable flows\n const flows = getApplicableFlows(ctx.platform, ctx.workspaceRoot);\n if (flows.length === 0) {\n return {\n executionResult: emptyExecutionResult(),\n filteredFlowSources: new Map(),\n };\n }\n\n // Build context\n const flowCtx = buildImportFlowContext(ctx, 'install');\n\n // Discover and filter\n const filteredFlowSources = await discoverAndFilterSources(\n flows, ctx, flowCtx, opts,\n );\n\n // Execute\n const executionResult = await executeImportFlows(filteredFlowSources, flowCtx);\n\n return { executionResult, filteredFlowSources };\n}\n\n// ---------------------------------------------------------------------------\n// Standalone resource filtering (extracted from BaseStrategy)\n// ---------------------------------------------------------------------------\n\n/**\n * Filter flow sources based on a matched pattern (from base detection or\n * resource scoping). 
Standalone version of BaseStrategy.applyResourceFiltering().\n */\nexport function applyResourceFiltering(\n flowSources: Map<Flow, string[]>,\n matchedPattern: string | undefined,\n packageRoot: string,\n): Map<Flow, string[]> {\n if (!matchedPattern) {\n return flowSources;\n }\n\n const normalizedPattern = matchedPattern.replace(/\\\\/g, '/');\n const filteredSources = new Map<Flow, string[]>();\n\n for (const [flow, sources] of flowSources.entries()) {\n const filtered = sources.filter(sourcePath => {\n const isAbs = sourcePath.startsWith('/') || /^[A-Za-z]:[\\\\/]/.test(sourcePath);\n const normalizedSource = sourcePath.replace(/\\\\/g, '/');\n const relativePath = (\n isAbs ? relative(packageRoot, sourcePath) : normalizedSource\n ).replace(/\\\\/g, '/');\n\n return matchPackagePath(relativePath, normalizedPattern);\n });\n\n if (filtered.length > 0) {\n filteredSources.set(flow, filtered);\n }\n }\n\n return filteredSources;\n}\n\n// ---------------------------------------------------------------------------\n// Helpers\n// ---------------------------------------------------------------------------\n\nfunction emptyExecutionResult(): ExecutionResult {\n return {\n success: true,\n filesProcessed: 0,\n filesWritten: 0,\n targetPaths: [],\n fileMapping: {},\n conflicts: [],\n errors: [],\n warnings: [],\n };\n}\n", "/**\n * Base Strategy Module\n * \n * Abstract base class providing shared functionality for installation strategies.\n */\n\nimport type { Platform } from '../../platforms.js';\nimport type { FlowContext } from '../../../types/flows.js';\nimport type { PackageFormat } from '../format-detector.js';\nimport type { InstallationStrategy, FlowInstallContext, FlowInstallResult } from './types.js';\nimport type { InstallOptions } from '../../../types/index.js';\nimport { logger } from '../../../utils/logger.js';\nimport { createEmptyResult } from './helpers/result-converter.js';\nimport { getApplicableFlows } from './helpers/flow-helpers.js';\nimport { 
logInstallationResult } from '../helpers/result-logging.js';\nimport { buildImportFlowContext, type ImportPipelineContext } from '../../flows/import-pipeline.js';\n\n/**\n * Abstract base class for installation strategies\n */\nexport abstract class BaseStrategy implements InstallationStrategy {\n abstract readonly name: string;\n \n abstract canHandle(format: PackageFormat, platform: Platform): boolean;\n \n abstract install(\n context: FlowInstallContext,\n options?: InstallOptions,\n forceOverwrite?: boolean\n ): Promise<FlowInstallResult>;\n \n /**\n * Create an empty result object\n */\n protected createEmptyResult(): FlowInstallResult {\n return createEmptyResult();\n }\n \n /**\n * Get applicable flows for a platform (global + platform-specific)\n */\n protected getApplicableFlows(platform: Platform, cwd: string) {\n return getApplicableFlows(platform, cwd);\n }\n \n /**\n * Build flow context with standard variables\n *\n * Delegates to the shared import pipeline's buildImportFlowContext().\n */\n protected buildFlowContext(\n context: FlowInstallContext,\n direction: 'install' | 'save' = 'install'\n ): FlowContext {\n const pipelineCtx: ImportPipelineContext = {\n packageName: context.packageName,\n packageRoot: context.packageRoot,\n workspaceRoot: context.workspaceRoot,\n platform: context.platform,\n packageVersion: context.packageVersion,\n priority: context.priority,\n dryRun: context.dryRun,\n conversionContext: context.conversionContext,\n matchedPattern: context.matchedPattern,\n };\n return buildImportFlowContext(pipelineCtx, direction);\n }\n \n /**\n * Log strategy selection for debugging\n */\n protected logStrategySelection(context: FlowInstallContext): void {\n // Strategy selection logging removed for cleaner output\n }\n \n /**\n * Log installation results using shared utility\n */\n protected logResults(result: FlowInstallResult, context: FlowInstallContext): void {\n logInstallationResult(\n result,\n context.packageName,\n 
context.platform,\n context.dryRun ?? false\n );\n }\n \n /**\n * Create an error result\n */\n protected createErrorResult(\n context: FlowInstallContext,\n error: Error,\n message: string\n ): FlowInstallResult {\n return {\n success: false,\n filesProcessed: 0,\n filesWritten: 0,\n conflicts: [],\n errors: [{\n flow: { from: context.packageRoot, to: context.workspaceRoot },\n sourcePath: context.packageRoot,\n error,\n message\n }],\n targetPaths: [],\n fileMapping: {}\n };\n }\n\n}\n", "/**\n * Platform Converter Module\n * \n * High-level orchestration for converting packages between formats:\n * - Platform-specific \u2192 Universal \u2192 Platform-specific\n * - Direct installation when source = target platform\n */\n\nimport { basename, dirname, join, relative } from 'path';\nimport { promises as fs } from 'fs';\nimport type { Package, PackageFile } from '../../types/index.js';\nimport type { PackageConversionContext } from '../../types/conversion-context.js';\nimport type { Platform } from '../platforms.js';\nimport type { Flow, FlowContext, FlowResult } from '../../types/flows.js';\nimport type { PackageFormat } from '../install/format-detector.js';\nimport { \n detectPackageFormat, \n isPlatformSpecific,\n needsConversion \n} from '../install/format-detector.js';\nimport { getPlatformDefinition, getGlobalImportFlows } from '../platforms.js';\nimport { createFlowExecutor } from './flow-executor.js';\nimport {\n extractCapturedName,\n getFirstFromPattern,\n resolvePattern\n} from './flow-source-discovery.js';\nimport { isSwitchExpression, resolveSwitchExpression } from './switch-resolver.js';\nimport { resolveTargetFromGlob } from './flow-execution-coordinator.js';\nimport { logger } from '../../utils/logger.js';\nimport { ensureDir, writeTextFile, readTextFile } from '../../utils/fs.js';\nimport { deriveResourceLeafFromPackageName } from '../../utils/plugin-naming.js';\nimport { tmpdir } from 'os';\nimport { mkdtemp, rm } from 'fs/promises';\nimport { 
minimatch } from 'minimatch';\nimport { \n createContextFromPackage,\n updateContextAfterConversion,\n withTargetPlatform \n} from '../conversion-context/index.js';\n\n/**\n * Conversion pipeline stage\n */\nexport interface ConversionStage {\n name: string;\n description: string;\n flows: Flow[];\n inverted: boolean;\n}\n\n/**\n * Conversion pipeline definition\n */\nexport interface ConversionPipeline {\n source: PackageFormat;\n target: Platform;\n stages: ConversionStage[];\n needsConversion: boolean;\n}\n\n/**\n * Conversion options\n */\nexport interface ConversionOptions {\n dryRun?: boolean;\n}\n\n/**\n * Conversion result with updated context\n */\nexport interface ConversionResult {\n success: boolean;\n convertedPackage?: Package;\n updatedContext?: PackageConversionContext;\n stages: Array<{\n stage: string;\n filesProcessed: number;\n success: boolean;\n error?: string;\n }>;\n}\n\n/**\n * Platform Converter\n * \n * Orchestrates multi-stage conversions between package formats\n */\nexport class PlatformConverter {\n private workspaceRoot: string;\n \n constructor(workspaceRoot: string) {\n this.workspaceRoot = workspaceRoot;\n }\n \n /**\n * Convert a package to target platform format with conversion context\n * \n * @param pkg - Package to convert\n * @param context - Conversion context (optional, will be created if not provided)\n * @param targetPlatform - Target platform\n * @param options - Conversion options\n * @returns Conversion result with updated context\n */\n async convert(\n pkg: Package,\n context: PackageConversionContext | undefined,\n targetPlatform: Platform,\n options?: ConversionOptions\n ): Promise<ConversionResult> {\n logger.info('Starting platform conversion', {\n package: pkg.metadata.name,\n targetPlatform,\n hasContext: !!context\n });\n \n // Create or use provided context\n const conversionContext = context || createContextFromPackage(pkg);\n \n // Set target platform\n const contextWithTarget = 
withTargetPlatform(conversionContext, targetPlatform);\n \n // Use provided format if available, otherwise detect from files\n const sourceFormat = pkg._format || detectPackageFormat(pkg.files);\n \n logger.debug('Source format', {\n type: sourceFormat.type,\n platform: sourceFormat.platform,\n confidence: sourceFormat.confidence,\n source: pkg._format ? 'provided' : 'detected',\n contextOriginal: conversionContext.originalFormat.platform\n });\n \n // Build conversion pipeline\n const pipeline = this.buildPipeline(sourceFormat, targetPlatform);\n \n if (!pipeline.needsConversion) {\n logger.info('No conversion needed - formats match');\n return {\n success: true,\n convertedPackage: pkg,\n updatedContext: contextWithTarget,\n stages: []\n };\n }\n \n // Execute pipeline with context\n return await this.executePipeline(pkg, contextWithTarget, pipeline, options);\n }\n \n /**\n * Build conversion pipeline based on source and target formats\n */\n buildPipeline(\n sourceFormat: PackageFormat,\n targetPlatform: Platform\n ): ConversionPipeline {\n const stages: ConversionStage[] = [];\n const needsConv = needsConversion(sourceFormat, targetPlatform);\n \n logger.info('Checking if conversion needed', {\n sourceType: sourceFormat.type,\n sourcePlatform: sourceFormat.platform,\n targetPlatform,\n needsConversion: needsConv\n });\n \n if (!needsConv) {\n return {\n source: sourceFormat,\n target: targetPlatform,\n stages: [],\n needsConversion: false\n };\n }\n \n // Platform-specific \u2192 Universal\n if (isPlatformSpecific(sourceFormat) && sourceFormat.platform) {\n const sourcePlatform = sourceFormat.platform;\n \n // Get source platform import flows (these transform workspace \u2192 package)\n const platformDef = getPlatformDefinition(sourcePlatform, this.workspaceRoot);\n const platformImportFlows = platformDef.import || [];\n const globalImportFlows = getGlobalImportFlows(this.workspaceRoot) || [];\n \n const allImportFlows = [...globalImportFlows, 
...platformImportFlows];\n \n logger.info(`Building conversion stage with ${allImportFlows.length} import flows`, {\n sourcePlatform,\n flowCount: allImportFlows.length\n });\n\n // IMPORTANT:\n // These \"import\" flows are defined to read platform-specific paths (e.g. \".claude-plugin/plugin.json\")\n // from the source package and write universal outputs (e.g. \"openpackage.yml\").\n //\n // We should NOT strip the platform prefix here. Doing so turns \".claude-plugin/plugin.json\" into\n // \"plugin.json\", which will not exist in a real Claude plugin repository and makes the\n // platform-to-universal stage a no-op (causing infinite re-detection/re-conversion loops).\n const adjustedFlows = allImportFlows;\n \n stages.push({\n name: 'platform-to-universal',\n description: `Convert from ${sourcePlatform} format to universal format`,\n flows: adjustedFlows,\n inverted: false // Not inverted - using import flows directly\n });\n }\n \n // Universal \u2192 Target Platform\n // Note: This stage will be handled by the normal flow-based installer\n // We only need to convert TO universal here; the installer handles universal \u2192 platform\n \n return {\n source: sourceFormat,\n target: targetPlatform,\n stages,\n needsConversion: true\n };\n }\n \n /**\n * Execute conversion pipeline with context\n */\n async executePipeline(\n pkg: Package,\n context: PackageConversionContext,\n pipeline: ConversionPipeline,\n options?: ConversionOptions\n ): Promise<ConversionResult> {\n const result: ConversionResult = {\n success: true,\n stages: []\n };\n \n let currentPackage = pkg;\n let currentContext = context;\n const dryRun = options?.dryRun ?? 
false;\n \n // Create temporary directory for intermediate files\n let tempDir: string | null = null;\n \n try {\n tempDir = await mkdtemp(join(tmpdir(), 'opkg-convert-'));\n \n for (const stage of pipeline.stages) {\n logger.info(`Executing conversion stage: ${stage.name}`);\n \n const stageResult = await this.executeStage(\n currentPackage,\n currentContext,\n stage,\n tempDir,\n dryRun,\n pipeline.target\n );\n \n result.stages.push({\n stage: stage.name,\n filesProcessed: stageResult.filesProcessed,\n success: stageResult.success,\n error: stageResult.error\n });\n \n if (!stageResult.success) {\n result.success = false;\n result.updatedContext = currentContext;\n return result;\n }\n \n // Update package with converted files\n if (stageResult.convertedFiles) {\n currentPackage = {\n ...currentPackage,\n files: stageResult.convertedFiles,\n // Update format to universal (source platform tracked in context)\n _format: {\n type: 'universal',\n platform: undefined,\n confidence: 1.0,\n analysis: {\n universalFiles: stageResult.convertedFiles.length,\n platformSpecificFiles: 0,\n detectedPlatforms: new Map(),\n totalFiles: stageResult.convertedFiles.length,\n samplePaths: {\n universal: stageResult.convertedFiles.slice(0, 3).map(f => f.path),\n platformSpecific: []\n }\n }\n }\n };\n \n // Update context after conversion\n currentContext = updateContextAfterConversion(\n currentContext,\n { type: 'universal', platform: undefined },\n pipeline.target\n );\n }\n }\n \n result.convertedPackage = currentPackage;\n result.updatedContext = currentContext;\n return result;\n \n } catch (error) {\n logger.error('Conversion pipeline failed', { error });\n result.success = false;\n result.updatedContext = currentContext;\n result.stages.push({\n stage: 'pipeline',\n filesProcessed: 0,\n success: false,\n error: (error as Error).message\n });\n return result;\n \n } finally {\n // Cleanup temp directory\n if (tempDir) {\n try {\n await rm(tempDir, { recursive: true, force: 
true });\n } catch (error) {\n logger.warn('Failed to cleanup temp directory', { tempDir, error });\n }\n }\n }\n }\n \n /**\n * Discover files matching a glob pattern\n */\n private async discoverMatchingFiles(\n pattern: string | string[] | import('../../types/flows.js').SwitchExpression,\n baseDir: string\n ): Promise<string[]> {\n // Handle switch expressions\n if (typeof pattern === 'object' && '$switch' in pattern) {\n throw new Error('Cannot discover files from SwitchExpression - expression must be resolved first');\n }\n const patterns = Array.isArray(pattern) ? pattern : [pattern];\n const matches: string[] = [];\n \n // Walk all files in baseDir\n async function* walkFiles(dir: string): AsyncGenerator<string> {\n const entries = await fs.readdir(dir, { withFileTypes: true });\n \n for (const entry of entries) {\n const fullPath = join(dir, entry.name);\n \n if (entry.isDirectory()) {\n yield* walkFiles(fullPath);\n } else if (entry.isFile()) {\n yield fullPath;\n }\n }\n }\n \n // Check each file against the patterns\n for await (const filePath of walkFiles(baseDir)) {\n const relativePath = relative(baseDir, filePath);\n \n // Check if file matches any pattern (with priority - first match wins for arrays)\n for (const p of patterns) {\n // IMPORTANT: conversion must match dotfiles like \".claude-plugin/plugin.json\".\n // If dotfiles don't match, platform-to-universal stages can become no-ops and loop forever.\n if (minimatch(relativePath, p, { dot: true })) {\n matches.push(filePath);\n break; // Only match once per file\n }\n }\n }\n \n return matches;\n }\n \n /**\n * Execute a single conversion stage with context\n */\n private async executeStage(\n pkg: Package,\n context: PackageConversionContext,\n stage: ConversionStage,\n tempDir: string,\n dryRun: boolean,\n targetPlatform: Platform\n ): Promise<{\n success: boolean;\n filesProcessed: number;\n convertedFiles?: PackageFile[];\n error?: string;\n }> {\n try {\n const executor = 
createFlowExecutor();\n const convertedFiles: PackageFile[] = [];\n let filesProcessed = 0;\n const matchedSources = new Set<string>();\n \n // Create isolated input/output roots for this stage\n const stageRoot = join(tempDir, stage.name);\n const packageRoot = join(stageRoot, 'in');\n const outputRoot = join(stageRoot, 'out');\n await ensureDir(packageRoot);\n await ensureDir(outputRoot);\n \n // Write package files to temp directory\n for (const file of pkg.files) {\n const filePath = join(packageRoot, file.path);\n await ensureDir(join(filePath, '..'));\n await writeTextFile(filePath, file.content);\n }\n \n // Build flow context with proper platform variables for conditional evaluation\n // During conversion, we set:\n // - $$platform = target platform (for conditionals like \"$eq\": [\"$$platform\", \"claude\"])\n // - $$source = original source format (for conditionals like \"$eq\": [\"$$source\", \"claude-plugin\"])\n const resourceLeaf = deriveResourceLeafFromPackageName(pkg.metadata.name);\n const flowContext: FlowContext = {\n workspaceRoot: outputRoot, // Write outputs away from inputs\n packageRoot,\n platform: targetPlatform, // Use target platform for conditional evaluation\n packageName: pkg.metadata.name,\n direction: 'install', // Always use 'install' direction for conversion\n variables: {\n name: pkg.metadata.name,\n version: pkg.metadata.version || '0.0.0',\n platform: targetPlatform, // For conditional: \"$eq\": [\"$$platform\", \"claude\"]\n source: context.originalFormat.platform || 'openpackage', // Use context for $$source\n sourcePlatform: context.originalFormat.platform || 'openpackage', // Use context for sourcePlatform\n targetPlatform: targetPlatform,\n resourceLeaf\n },\n dryRun\n };\n \n // Execute flows for each matching file\n for (const flow of stage.flows) {\n // Discover files that match the flow's 'from' pattern\n const matchingFiles = await this.discoverMatchingFiles(\n flow.from,\n packageRoot\n );\n \n logger.info(`Flow 
pattern matching`, {\n pattern: flow.from,\n matchCount: matchingFiles.length,\n matches: matchingFiles.map(f => relative(packageRoot, f))\n });\n \n if (matchingFiles.length === 0) {\n logger.debug('No files match flow pattern', { \n pattern: flow.from,\n packageRoot \n });\n continue;\n }\n \n // Execute flow for each matching file\n for (const sourceFile of matchingFiles) {\n const sourceRelative = relative(packageRoot, sourceFile);\n matchedSources.add(sourceRelative);\n\n // Preserve the original source pattern when deriving the target path.\n // Converting the flow to a literal `from` too early breaks generic\n // mappings like `**/skills/**/* -> skills/**/*` by producing\n // `skills/config/skills/...` instead of anchoring at the `skills/` dir.\n const firstFromPattern = getFirstFromPattern(flow.from);\n const capturedName = extractCapturedName(sourceRelative, firstFromPattern);\n const sourceContext: FlowContext = {\n ...flowContext,\n variables: {\n ...flowContext.variables,\n sourcePath: sourceRelative,\n sourceDir: dirname(sourceRelative),\n sourceFile: basename(sourceRelative),\n ...(capturedName ? { capturedName } : {})\n }\n };\n\n let concreteFlow: Flow = {\n ...flow,\n from: sourceRelative\n };\n\n if (typeof flow.to === 'string' || isSwitchExpression(flow.to)) {\n const rawToPattern = typeof flow.to === 'string'\n ? 
flow.to\n : resolveSwitchExpression(flow.to, sourceContext);\n const resolvedToPattern = resolvePattern(rawToPattern, sourceContext, capturedName);\n const targetAbs = resolveTargetFromGlob(\n sourceFile,\n firstFromPattern,\n resolvedToPattern,\n sourceContext\n );\n const targetRel = relative(outputRoot, targetAbs);\n\n concreteFlow = {\n ...concreteFlow,\n to: targetRel\n };\n }\n\n const flowResult = await executor.executeFlow(concreteFlow, sourceContext);\n \n if (!flowResult.success) {\n return {\n success: false,\n filesProcessed,\n error: `Flow execution failed for ${sourceRelative}: ${flowResult.error?.message}`\n };\n }\n \n filesProcessed++;\n \n // Collect transformed files\n if (typeof flowResult.target === 'string') {\n const targetPath = relative(outputRoot, flowResult.target);\n \n // Read transformed file content\n try {\n const content = await readTextFile(flowResult.target);\n \n convertedFiles.push({\n path: targetPath,\n content,\n encoding: 'utf8'\n });\n } catch (error) {\n logger.warn('Failed to read converted file', { \n target: flowResult.target, \n error \n });\n }\n }\n }\n }\n\n return {\n success: true,\n filesProcessed,\n convertedFiles: convertedFiles.length > 0 ? 
convertedFiles : undefined\n };\n \n } catch (error) {\n logger.error('Stage execution failed', { stage: stage.name, error });\n return {\n success: false,\n filesProcessed: 0,\n error: (error as Error).message\n };\n }\n }\n}\n\n/**\n * Create a platform converter instance\n */\nexport function createPlatformConverter(workspaceRoot: string): PlatformConverter {\n return new PlatformConverter(workspaceRoot);\n}\n", "/**\n * Standard Flow-Based Installation Strategy\n * \n * Applies platform flows with full transformations.\n * Used for universal format packages.\n */\n\nimport { join, relative, dirname, basename, extname } from 'path';\nimport type { Platform } from '../../platforms.js';\nimport type { PackageFormat } from '../format-detector.js';\nimport type { InstallOptions } from '../../../types/index.js';\nimport type { FlowInstallContext, FlowInstallResult } from './types.js';\nimport type { Flow, FlowContext } from '../../../types/flows.js';\nimport { BaseStrategy } from './base-strategy.js';\nimport { platformUsesFlows } from '../../platforms.js';\nimport { convertToInstallResult } from './helpers/result-converter.js';\nimport {\n resolveTargetFromGlob,\n} from '../../flows/flow-execution-coordinator.js';\nimport { isPassThroughFlow } from '../../flows/flow-executor.js';\nimport {\n resolvePattern,\n extractCapturedName,\n getFirstFromPattern,\n} from '../../flows/flow-source-discovery.js';\nimport { resolveSwitchExpression } from '../../flows/switch-resolver.js';\nimport { extractToPatternString } from '../../flows/to-pattern-extractor.js';\nimport {\n buildImportFlowContext,\n discoverAndFilterSources,\n executeImportFlows,\n type ImportPipelineContext,\n} from '../../flows/import-pipeline.js';\nimport {\n buildOwnershipContext,\n resolveConflictsForTargets,\n type TargetEntry,\n} from '../conflicts/file-conflict-resolver.js';\nimport { normalizePathForProcessing } from '../../../utils/path-normalization.js';\nimport { readTextFile } from 
'../../../utils/fs.js';\nimport { MARKDOWN_EXTENSIONS } from '../../../constants/index.js';\nimport { mergeInlinePlatformOverride } from '../../platform-yaml-merge.js';\nimport { logger } from '../../../utils/logger.js';\nimport { readWorkspaceIndex } from '../../../utils/workspace-index-yml.js';\n\ninterface ComputedTargets {\n entries: TargetEntry[];\n sourceMap: Array<{ flow: Flow; sourceRel: string }>;\n}\n\n/**\n * Standard Flow-Based Installation Strategy\n *\n * Applies platform flows with full transformations.\n * Used for universal format packages.\n */\nexport class FlowBasedInstallStrategy extends BaseStrategy {\n readonly name = 'flow-based';\n \n canHandle(format: PackageFormat, platform: Platform): boolean {\n // Default strategy - handles all remaining cases\n return true;\n }\n \n async install(\n context: FlowInstallContext,\n options?: InstallOptions,\n forceOverwrite: boolean = false\n ): Promise<FlowInstallResult> {\n const { packageName, packageRoot, workspaceRoot, platform, dryRun } = context;\n\n this.logStrategySelection(context);\n // Check if platform uses flows\n if (!platformUsesFlows(platform, workspaceRoot)) {\n return this.createEmptyResult();\n }\n\n // Get applicable flows\n const flows = this.getApplicableFlows(platform, workspaceRoot);\n if (flows.length === 0) {\n return this.createEmptyResult();\n }\n\n // Build shared pipeline context (used for both FlowContext and discovery)\n const pipelineCtx: ImportPipelineContext = {\n packageName, packageRoot, workspaceRoot, platform,\n packageVersion: context.packageVersion,\n priority: context.priority,\n dryRun: context.dryRun,\n conversionContext: context.conversionContext,\n matchedPattern: context.matchedPattern,\n };\n const flowContext = buildImportFlowContext(pipelineCtx, 'install');\n\n // Use shared pipeline stages for discovery + filtering\n const filteredSources = await discoverAndFilterSources(\n flows, pipelineCtx, flowContext,\n );\n\n // 
-----------------------------------------------------------------------\n // File-level conflict resolution (Phase 3)\n // -----------------------------------------------------------------------\n const effectiveOptions = options ?? {};\n const conflictWarnings: string[] = [];\n let wasNamespaced = false;\n let conflictRelocatedFiles: Array<{ from: string; to: string }> = [];\n\n try {\n // Pre-compute the target paths that will be written by the flows\n const computed = this.computeTargetEntries(filteredSources, flowContext);\n const targets = computed.entries;\n\n if (targets.length > 0) {\n // Use shared ownership context (parallel mode) or build fresh (sequential mode)\n let ownershipContext;\n if (context.sharedOwnershipContext) {\n ownershipContext = context.sharedOwnershipContext;\n } else {\n // Use pre-read record when available to avoid redundant disk reads\n const wsRecord = context.previousIndexRecord;\n const previousRecord = wsRecord\n ? this.extractPreviousRecord(wsRecord, packageName)\n : await this.readPreviousIndexRecord(workspaceRoot, packageName);\n\n // Build ownership context (other-package indexes + previous-owned paths)\n ownershipContext = await buildOwnershipContext(\n workspaceRoot,\n packageName,\n previousRecord,\n wsRecord\n );\n }\n\n // Extract persisted namespace from pre-read record to avoid a redundant readWorkspaceIndex\n const persistedNamespace = context.previousIndexRecord\n ?.index.packages?.[packageName]?.namespace;\n\n // Resolve conflicts \u2014 get back the filtered set of allowed targets\n const { allowedTargets, warnings, packageWasNamespaced, namespaceDir, relocatedFiles, claimedFiles } = await resolveConflictsForTargets(\n workspaceRoot,\n targets,\n ownershipContext,\n effectiveOptions,\n packageName,\n forceOverwrite,\n context.prompt,\n undefined,\n context.indexWriteCollector,\n persistedNamespace\n );\n conflictWarnings.push(...warnings);\n wasNamespaced = packageWasNamespaced;\n conflictRelocatedFiles = 
relocatedFiles;\n\n // Build target path remap and allowed original paths in a single pass.\n // Index allowedTargets by composite key for O(1) lookup.\n const allowedByKey = new Map<string, TargetEntry>();\n for (const at of allowedTargets) {\n allowedByKey.set(`${at.sourceAbsPath}\\0${at.flowToPattern}`, at);\n }\n\n const targetPathRemap = new Map<string, string>();\n const allowedOriginalPaths = new Set<string>();\n for (const t of targets) {\n const key = `${t.sourceAbsPath}\\0${t.flowToPattern}`;\n const match = allowedByKey.get(key);\n if (!match) continue;\n\n const origNorm = normalizePathForProcessing(t.relPath);\n allowedOriginalPaths.add(origNorm);\n\n if (packageWasNamespaced) {\n const newNorm = normalizePathForProcessing(match.relPath);\n if (origNorm !== newNorm) {\n targetPathRemap.set(origNorm, newNorm);\n }\n }\n }\n const prunedSources = this.buildPrunedSources(computed, allowedOriginalPaths);\n\n // Pass remap into flow execution context\n const execFlowContext: FlowContext = targetPathRemap.size > 0\n ? 
{ ...flowContext, targetPathRemap }\n : flowContext;\n\n // Execute flows on the pruned source set (shared pipeline stage)\n const executionResult = await executeImportFlows(prunedSources, execFlowContext);\n const result = convertToInstallResult(executionResult, packageName, platform, dryRun);\n\n // Surface schema validation warnings\n if (executionResult.warnings?.length) {\n for (const w of executionResult.warnings) {\n logger.warn(w);\n }\n }\n\n // Surface conflict warnings as additional FlowConflictReport entries\n for (const msg of conflictWarnings) {\n logger.warn(msg);\n result.conflicts.push({\n targetPath: '',\n packages: [{ packageName, priority: 0, chosen: true }],\n message: msg\n });\n }\n\n // Attach namespace metadata to the result\n result.namespaced = wasNamespaced;\n result.namespaceSlug = namespaceDir;\n result.relocatedFiles = conflictRelocatedFiles;\n result.claimedFiles = claimedFiles;\n\n this.logResults(result, context);\n return result;\n }\n } catch (error) {\n // Conflict resolution is best-effort: on unexpected failure log and continue\n logger.warn(`File conflict resolution failed for ${packageName}: ${error}. 
Proceeding without conflict checks.`);\n }\n\n // Execute flows (no targets to conflict-check, or conflict resolution errored)\n const executionResult = await executeImportFlows(filteredSources, flowContext);\n\n // Surface schema validation warnings\n if (executionResult.warnings?.length) {\n for (const w of executionResult.warnings) {\n logger.warn(w);\n }\n }\n\n // Convert to result\n const result = convertToInstallResult(executionResult, packageName, platform, dryRun);\n\n this.logResults(result, context);\n \n return result;\n }\n\n // -------------------------------------------------------------------------\n // Helpers\n // -------------------------------------------------------------------------\n\n /**\n * Pre-compute the workspace-relative target path for each (flow, source) pair\n * using the same resolution logic as the flow execution coordinator.\n * Each entry is annotated with the resolved `to` pattern and merge-flow flag\n * so that the conflict resolver can derive namespace insertion points and\n * correctly exclude merge flows from namespacing.\n */\n private computeTargetEntries(\n flowSources: Map<Flow, string[]>,\n flowContext: FlowContext\n ): ComputedTargets {\n const entries: TargetEntry[] = [];\n const sourceMap: Array<{ flow: Flow; sourceRel: string }> = [];\n\n for (const [flow, sources] of flowSources) {\n const firstPattern = getFirstFromPattern(flow.from);\n // A flow is a merge flow when its merge strategy is not plain 'replace'\n // (deep, shallow, and composite all produce merged/combined output)\n const isMergeFlow = Boolean(\n flow.merge && flow.merge !== 'replace'\n );\n\n for (const sourceRel of sources) {\n try {\n const sourceAbs = join(flowContext.packageRoot, sourceRel);\n const capturedName = extractCapturedName(sourceRel, firstPattern);\n\n const sourceContext: FlowContext = {\n ...flowContext,\n variables: {\n ...flowContext.variables,\n sourcePath: sourceRel,\n sourceDir: dirname(sourceRel),\n sourceFile: 
basename(sourceRel),\n ...(capturedName ? { capturedName } : {})\n }\n };\n\n const rawToPattern = extractToPatternString(\n flow.to,\n (sw) => resolveSwitchExpression(sw, sourceContext)\n ) ?? '';\n const resolvedToPattern = resolvePattern(rawToPattern, sourceContext, capturedName);\n const targetAbs = resolveTargetFromGlob(\n sourceAbs,\n firstPattern,\n resolvedToPattern,\n sourceContext\n );\n\n const targetRelRaw = relative(flowContext.workspaceRoot, targetAbs);\n const targetRel = normalizePathForProcessing(targetRelRaw);\n\n // Always provide sourceAbsPath for non-merge flows so the conflict\n // resolver can fall back to a raw-source comparison when needed.\n const provideSourcePath = !isMergeFlow;\n\n // For non-pass-through markdown flows during a platform install the\n // executor will apply mergeInlinePlatformOverride(). Build a lazy\n // callback so the conflict resolver can compare the *transformed*\n // output (not just the raw source) against what's already on disk.\n let resolveOutputContent: (() => Promise<string>) | undefined;\n if (\n provideSourcePath\n && !isPassThroughFlow(flow, sourceAbs, targetAbs, flowContext)\n && MARKDOWN_EXTENSIONS.has(extname(sourceAbs).toLowerCase())\n && flowContext.platform\n && flowContext.direction === 'install'\n ) {\n const capturedSourceAbs = sourceAbs;\n const capturedPlatform = flowContext.platform;\n const capturedWorkspaceRoot = flowContext.workspaceRoot;\n resolveOutputContent = async () => {\n const raw = await readTextFile(capturedSourceAbs, 'utf8');\n return mergeInlinePlatformOverride(raw, capturedPlatform, capturedWorkspaceRoot);\n };\n }\n\n entries.push({\n relPath: targetRel,\n absPath: targetAbs,\n flowToPattern: resolvedToPattern,\n isMergeFlow,\n sourceAbsPath: provideSourcePath ? 
sourceAbs : undefined,\n resolveOutputContent,\n });\n sourceMap.push({ flow, sourceRel });\n } catch {\n // If target resolution fails for a source, skip it \u2014 the executor\n // will handle the error properly during execution.\n }\n }\n }\n\n return { entries, sourceMap };\n }\n\n /**\n * Build a pruned flow\u2192sources map by checking each pre-computed target entry\n * against the set of allowed original paths. Merge-flow entries are always kept.\n */\n private buildPrunedSources(\n computed: ComputedTargets,\n allowedOriginalPaths: Set<string>\n ): Map<Flow, string[]> {\n const pruned = new Map<Flow, string[]>();\n for (let i = 0; i < computed.entries.length; i++) {\n const entry = computed.entries[i];\n const { flow, sourceRel } = computed.sourceMap[i];\n if (entry.isMergeFlow || allowedOriginalPaths.has(normalizePathForProcessing(entry.relPath))) {\n if (!pruned.has(flow)) pruned.set(flow, []);\n pruned.get(flow)!.push(sourceRel);\n }\n }\n return pruned;\n }\n\n /**\n * Extract the previous index record from a pre-read workspace index (no I/O).\n */\n private extractPreviousRecord(\n wsRecord: import('../../../utils/workspace-index-yml.js').WorkspaceIndexRecord,\n packageName: string\n ): { files: Record<string, any[]> } | null {\n const entry = wsRecord.index.packages?.[packageName];\n if (!entry) return null;\n return { files: entry.files ?? {} };\n }\n\n /**\n * Read the package's existing workspace-index entry (its files mapping),\n * used to determine which paths were previously owned by this package.\n * Fallback for when no pre-read record is available.\n */\n private async readPreviousIndexRecord(\n cwd: string,\n packageName: string\n ): Promise<{ files: Record<string, any[]> } | null> {\n try {\n const wsRecord = await readWorkspaceIndex(cwd);\n const entry = wsRecord.index.packages?.[packageName];\n if (!entry) return null;\n return { files: entry.files ?? 
{} };\n } catch {\n return null;\n }\n }\n}\n\n", "/**\n * File-Level Conflict Resolver\n *\n * Detects and resolves file-level conflicts for flow-based installs.\n * Uses package-name namespacing to organise conflicting files into\n * per-package subdirectories, preserving all versions on disk.\n */\n\nimport { dirname, join } from 'path';\nimport { promises as fs } from 'fs';\n\nimport { exists, ensureDir, readTextFile, walkFiles } from '../../../utils/fs.js';\nimport { normalizePathForProcessing } from '../../../utils/path-normalization.js';\nimport { formatPathForYaml } from '../../../utils/path-resolution.js';\nimport { calculateFileHash } from '../../../utils/hash-utils.js';\nimport { getTargetPath } from '../../../utils/workspace-index-helpers.js';\nimport { sortMapping, isDirKey, ensureTrailingSlash } from '../../../utils/package-index-yml.js';\nimport { readWorkspaceIndex, writeWorkspaceIndex } from '../../../utils/workspace-index-yml.js';\nimport { getRegistryDirectories } from '../../directory.js';\nimport { sep } from 'path';\nimport type { PromptPort } from '../../ports/prompt.js';\nimport { resolvePrompt } from '../../ports/resolve.js';\nimport { logger } from '../../../utils/logger.js';\nimport { deriveNamespaceSlug } from '../../../utils/plugin-naming.js';\nimport type { InstallOptions } from '../../../types/index.js';\nimport { generatePrefixedLeafPath } from './namespace-path.js';\nimport type { WorkspaceConflictOwner } from '../../../utils/workspace-index-ownership.js';\nimport type { WorkspaceIndexFileMapping } from '../../../types/workspace-index.js';\nimport type { IndexWriteCollector } from '../wave-resolver/index-write-collector.js';\nimport {\n loadOtherPackageIndexes,\n loadOtherPackageIndexesFromRecord,\n buildExpandedIndexesContext,\n type ExpandedIndexesContext,\n} from '../index-based-installer.js';\nimport type { WorkspaceIndexRecord } from '../../../utils/workspace-index-yml.js';\n\n// 
============================================================================\n// Internal Types\n// ============================================================================\n\ntype ConflictResolution = 'namespace' | 'skip' | 'overwrite';\n\ninterface PackageIndexRecord {\n path: string;\n packageName: string;\n workspace: {\n version: string;\n hash?: string;\n };\n files: Record<string, (string | WorkspaceIndexFileMapping)[]>;\n}\n\n// ============================================================================\n// Public Types\n// ============================================================================\n\nexport type FileConflictType = 'none' | 'owned-by-other' | 'exists-unowned';\n\nexport interface FileConflictInfo {\n type: FileConflictType;\n /** Set when type === 'owned-by-other' */\n owner?: WorkspaceConflictOwner;\n}\n\nexport interface OwnershipContext {\n expandedIndexes: ExpandedIndexesContext;\n /** Paths the current package already owned (so re-installs skip the conflict check) */\n previousOwnedPaths: Set<string>;\n /** Raw index records keyed by package name (needed for namespace index updates) */\n indexByPackage: Map<string, PackageIndexRecord>;\n /**\n * Paths written by the current package during earlier platform iterations in this\n * install run. Used when installing to multiple platforms that share target paths\n * (e.g. amp, kimi, replit all use .agents/skills/). Prevents later platforms from\n * treating earlier writes as \"exists-unowned\" and incorrectly namespacing.\n */\n currentRunWrittenPaths?: Set<string>;\n}\n\n/** A resolved target path together with the content that will be written there */\nexport interface TargetEntry {\n /** Workspace-relative path (forward-slash, normalised) */\n relPath: string;\n /** Absolute path */\n absPath: string;\n /**\n * Content of the source file. 
Used only for the 'exists-unowned' branch\n * where we compare content before deciding whether a conflict exists.\n * Leave undefined if you want to skip the content-diff check for that branch.\n */\n content?: string;\n /**\n * Absolute path to the source file. Set for pass-through flows (where\n * source content == target content) so the conflict resolver can lazily\n * read and compare only when an 'exists-unowned' check is actually needed.\n * Takes precedence over `content` when both are undefined \u2014 the resolver\n * reads the source on demand instead of requiring an eager read.\n */\n sourceAbsPath?: string;\n /**\n * Lazily compute the *transformed* output content for this target.\n * Used by non-pass-through flows (e.g. markdown with platform overrides)\n * where the on-disk source differs from what the executor would write.\n * The conflict resolver uses a three-tier cascade:\n * 1. `content` (eager, set by caller)\n * 2. `resolveOutputContent()` (lazy, transformed output)\n * 3. `sourceAbsPath` read (lazy, raw source \u2014 pass-through only)\n */\n resolveOutputContent?: () => Promise<string>;\n /**\n * The resolved `flow.to` pattern that produced this target path.\n * Used to derive the namespace insertion point (the base directory of the\n * pattern, i.e. everything before the first glob character).\n * e.g. \"rules/**\\/*.md\" \u2192 base = \"rules\"\n */\n flowToPattern?: string;\n /**\n * True when the flow that produced this entry uses a merge strategy\n * (deep, shallow, or composite). Merge-flow targets are excluded from\n * namespacing because they intentionally combine content from multiple\n * packages into a single file (e.g. 
mcp.json, settings.json).\n */\n isMergeFlow?: boolean;\n}\n\n/** A file that was physically moved from one location to another during namespace resolution */\nexport interface RelocatedFile {\n /** Original workspace-relative path before relocation */\n from: string;\n /** New workspace-relative path after relocation */\n to: string;\n}\n\nexport interface ConflictResolutionResult {\n /** Targets that should proceed to flow execution (paths may be rewritten to namespaced form) */\n allowedTargets: TargetEntry[];\n /** Human-readable warnings/notes accumulated during resolution */\n warnings: string[];\n /**\n * True when at least one conflict triggered bulk namespacing for the\n * installing package. The flow-based strategy uses this to rewrite the\n * flow `to` patterns before executing flows so that the executor writes\n * files to the correct namespaced locations.\n */\n packageWasNamespaced: boolean;\n /**\n * The package name used as the namespace directory segment.\n * Only set when packageWasNamespaced is true.\n */\n namespaceDir?: string;\n /**\n * Files that were physically relocated on disk during namespace resolution.\n * These are files owned by *other* packages that were moved into their\n * own namespace subdirectories to make room for the incoming package.\n */\n relocatedFiles: RelocatedFile[];\n /** Workspace-relative paths of files that were auto-claimed (unowned on disk, content identical) */\n claimedFiles: string[];\n}\n\n// ============================================================================\n// Private: expand a package's index to its owned file paths\n// (moved from index-based-installer.ts)\n// ============================================================================\n\nexport async function collectFilesUnderDirectory(cwd: string, dirRelPath: string): Promise<string[]> {\n const directoryRel = ensureTrailingSlash(normalizePathForProcessing(dirRelPath));\n const absDir = join(cwd, directoryRel);\n if (!(await exists(absDir))) 
{\n return [];\n }\n\n const collected: string[] = [];\n try {\n for await (const absFile of walkFiles(absDir)) {\n // normalizeRelativePath inline\n const { relative: relFn } = await import('path');\n const rel = relFn(cwd, absFile);\n collected.push(normalizePathForProcessing(rel).replace(/\\\\/g, '/'));\n }\n } catch (error) {\n logger.warn(`Failed to enumerate directory ${absDir}: ${error}`);\n }\n return collected;\n}\n\nexport async function expandIndexToFilePaths(\n cwd: string,\n index: PackageIndexRecord | null\n): Promise<Set<string>> {\n const owned = new Set<string>();\n if (!index) return owned;\n\n for (const [key, values] of Object.entries(index.files)) {\n if (isDirKey(key)) {\n for (const mapping of values) {\n const dirRel = getTargetPath(mapping);\n const files = await collectFilesUnderDirectory(cwd, dirRel);\n for (const rel of files) {\n owned.add(normalizePathForProcessing(rel));\n }\n }\n } else {\n for (const mapping of values) {\n const value = getTargetPath(mapping);\n owned.add(normalizePathForProcessing(value));\n }\n }\n }\n\n return owned;\n}\n\n// ============================================================================\n// Private: prompt helpers\n// ============================================================================\n\nasync function promptConflictResolution(message: string, prompt: PromptPort): Promise<ConflictResolution> {\n return prompt.select<ConflictResolution>(\n message,\n [\n { title: 'Namespace (organise by package name)', value: 'namespace' },\n { title: 'Skip (keeps existing)', value: 'skip' },\n { title: 'Overwrite (replaces existing)', value: 'overwrite' }\n ]\n );\n}\n\nasync function promptContentDifferenceResolution(\n workspacePath: string,\n prompt: PromptPort,\n sourcePath?: string\n): Promise<'overwrite' | 'skip'> {\n const formattedSource = sourcePath\n ? (sourcePath.startsWith('/') ? sourcePath : `/${sourcePath}`)\n : undefined;\n const message = formattedSource\n ? 
`Package file ${formattedSource} differs from workspace file ${workspacePath}`\n : `File ${workspacePath} differs from package version`;\n\n return prompt.select<'overwrite' | 'skip'>(\n message,\n [\n { title: 'Overwrite (use package version)', value: 'overwrite' },\n { title: 'Skip (keep workspace version)', value: 'skip' }\n ]\n );\n}\n\n// ============================================================================\n// Private: content-difference check\n// ============================================================================\n\nasync function hasContentDifference(absPath: string, newContent: string): Promise<boolean> {\n try {\n if (!(await exists(absPath))) return false;\n const existing = await readTextFile(absPath, 'utf8');\n if (existing === newContent) return false;\n const [existingHash, newHash] = await Promise.all([\n calculateFileHash(existing),\n calculateFileHash(newContent)\n ]);\n return existingHash !== newHash;\n } catch (error) {\n logger.warn(`Failed to check content difference for ${absPath}: ${error}`);\n return true; // assume differs on error \u2014 safer to prompt than to silently skip\n }\n}\n\n// ============================================================================\n// Private: derive a namespaced path for a target\n// ============================================================================\n\n/**\n * Generate a prefix-based namespaced path for a target.\n *\n * Prepends `slug-` to the leaf filename (or the parent directory for\n * marker-based resources like skills).\n *\n * Examples:\n * relPath=\"rules/foo.mdc\", slug=\"acme\", flowToPattern=\"rules/**\"\n * \u2192 \"rules/acme-foo.mdc\"\n *\n * relPath=\".cursor/rules/my-rule.mdc\", slug=\"corp\", flowToPattern=\".cursor/rules/**\"\n * \u2192 \".cursor/rules/corp-my-rule.mdc\"\n *\n * relPath=\"commands/review/SKILL.md\", slug=\"pkg-a\", flowToPattern=\"commands/**\"\n * \u2192 \"commands/pkg-a-review/SKILL.md\"\n *\n * When the leaf name (sans extension) equals 
slug, the path is returned\n * unchanged (dedup rule).\n */\nexport function generateNamespacedPath(\n relPath: string,\n packageName: string,\n flowToPattern: string | undefined\n): string {\n return generatePrefixedLeafPath(relPath, packageName, flowToPattern);\n}\n\n// ============================================================================\n// Private: update owner's workspace index after a namespace move\n// ============================================================================\n\nasync function updateOwnerIndexAfterRename(\n cwd: string,\n owner: WorkspaceConflictOwner,\n oldRelPath: string,\n newRelPath: string,\n indexByPackage: Map<string, PackageIndexRecord>,\n indexWriteCollector?: IndexWriteCollector\n): Promise<void> {\n const normalizedOld = normalizePathForProcessing(oldRelPath);\n const normalizedNew = normalizePathForProcessing(newRelPath);\n const record = indexByPackage.get(owner.packageName);\n if (!record) return;\n\n if (owner.type === 'file') {\n const values = record.files[owner.key];\n if (!values) return;\n const idx = values.findIndex(mapping => {\n const target = getTargetPath(mapping);\n return normalizePathForProcessing(target) === normalizedOld;\n });\n if (idx === -1) return;\n const oldMapping = values[idx];\n values[idx] = typeof oldMapping === 'string'\n ? normalizedNew\n : { ...oldMapping, target: normalizedNew };\n }\n // dir-key owners: directory key is still valid after rename, nothing to change.\n\n // Defer to collector if present (parallel install mode)\n if (indexWriteCollector) {\n indexWriteCollector.recordFileMappingRename({\n packageName: owner.packageName,\n indexKey: owner.key,\n oldTargetPath: normalizedOld,\n newTargetPath: normalizedNew,\n entrySnapshot: {\n path: record.path,\n version: record.workspace?.version,\n files: record.files,\n },\n });\n return;\n }\n\n // Persist to workspace index\n const wsRecord = await readWorkspaceIndex(cwd);\n wsRecord.index.packages = wsRecord.index.packages ?? 
{};\n const entry = wsRecord.index.packages[record.packageName];\n\n const rawPath =\n entry?.path ??\n record.path ??\n (record.workspace?.version\n ? join(getRegistryDirectories().packages, record.packageName, record.workspace.version, sep)\n : '');\n if (!rawPath) {\n logger.warn(`Skipping workspace index write for ${record.packageName}: source path unknown`);\n return;\n }\n\n const pathToUse = formatPathForYaml(rawPath, cwd);\n wsRecord.index.packages[record.packageName] = {\n ...entry,\n path: pathToUse,\n version: entry?.version ?? record.workspace?.version,\n files: sortMapping(record.files ?? {})\n };\n\n await writeWorkspaceIndex(wsRecord);\n}\n\n// ============================================================================\n// Public API\n// ============================================================================\n\n/**\n * Build the ownership context needed for conflict checks on a single install run.\n *\n * @param cwd - Workspace root\n * @param packageName - Package being installed (excluded from \"other owners\")\n * @param previousRecord - Workspace-index record for this package from the previous install,\n * or null if this is a fresh install.\n */\nexport async function buildOwnershipContext(\n cwd: string,\n packageName: string,\n previousRecord: { files: Record<string, (string | WorkspaceIndexFileMapping)[]> } | null,\n wsRecord?: WorkspaceIndexRecord | null\n): Promise<OwnershipContext> {\n const otherIndexes = wsRecord\n ? loadOtherPackageIndexesFromRecord(wsRecord, packageName)\n : await loadOtherPackageIndexes(cwd, packageName);\n const expandedIndexes = await buildExpandedIndexesContext(cwd, otherIndexes);\n\n // Build previousOwnedPaths from the caller-supplied record (avoids reading the index again)\n const previousIndex: PackageIndexRecord | null = previousRecord\n ? 
{\n path: '',\n packageName,\n workspace: { version: '' },\n files: previousRecord.files\n }\n : null;\n const previousOwnedPaths = await expandIndexToFilePaths(cwd, previousIndex);\n\n const indexByPackage = new Map<string, PackageIndexRecord>();\n for (const rec of otherIndexes) {\n indexByPackage.set(rec.packageName, rec);\n }\n\n return { expandedIndexes, previousOwnedPaths, indexByPackage };\n}\n\n/**\n * Classify a single target path as conflict-free, owned-by-another-package, or\n * existing-on-disk-but-unowned.\n */\nexport function classifyFileConflict(\n targetRelPath: string,\n ownershipContext: OwnershipContext\n): FileConflictInfo {\n const normalized = normalizePathForProcessing(targetRelPath);\n const owner = ownershipContext.expandedIndexes.installedPathOwners.get(normalized);\n\n if (owner) {\n return { type: 'owned-by-other', owner };\n }\n\n // Paths written by earlier platform iterations this run are treated as owned by\n // the current package \u2014 no conflict. Prevents duplicate namespaced files when\n // amp, kimi, replit (etc.) all target .agents/skills/.\n if (ownershipContext.currentRunWrittenPaths?.has(normalized)) {\n return { type: 'none' };\n }\n\n // For the 'exists-unowned' classification we only report the type here;\n // the caller must also check disk existence (and optionally content).\n if (!ownershipContext.previousOwnedPaths.has(normalized)) {\n return { type: 'exists-unowned' };\n }\n\n return { type: 'none' };\n}\n\n/**\n * Decide what to do for a single classified conflict.\n *\n * The decision cascade is:\n * 1. Per-path override (`options.conflictDecisions`)\n * 2. `--force` flag\n * 3. Configured strategy (`options.conflictStrategy`, when not 'ask')\n * 4. Default: 'namespace' for owned-by-other, TTY prompt for exists-unowned\n * 5. 
Non-interactive fallback: namespace (owned-by-other) / skip (exists-unowned)\n *\n * @param conflictType - 'owned-by-other' or 'exists-unowned'\n * @param relPath - Workspace-relative path (for display / per-path key lookup)\n * @param ownerName - Package name of the owner (only relevant for 'owned-by-other')\n * @param sourcePath - Package-relative source path (shown in prompts)\n * @param options - Install options carrying force / conflictStrategy / conflictDecisions\n * @param interactive - Whether prompts are allowed for conflict resolution\n * (derived from InteractionPolicy at the call site)\n * @param forceOverwrite - True when the package-level phase confirmed an overwrite for this pkg\n */\nexport async function resolveFileConflict(\n conflictType: 'owned-by-other' | 'exists-unowned',\n relPath: string,\n ownerName: string | undefined,\n sourcePath: string | undefined,\n options: InstallOptions,\n interactive: boolean,\n forceOverwrite: boolean,\n prompt?: PromptPort\n): Promise<{ decision: ConflictResolution; warning?: string }> {\n const perPathDecisions = options.conflictDecisions ?? {};\n const normalized = normalizePathForProcessing(relPath);\n\n // 1. Per-path override\n const perPath = perPathDecisions[normalized] ?? perPathDecisions[relPath];\n if (perPath) {\n return { decision: perPath as ConflictResolution };\n }\n\n // 2. Force flag or package-level force-overwrite confirmation\n if (options.force || forceOverwrite) {\n // --force still namespaces owned-by-other (both packages keep their file);\n // for unowned files --force overwrites since there's no package to namespace to.\n const resolution: ConflictResolution =\n conflictType === 'owned-by-other' ? 'namespace' : 'overwrite';\n const warning = conflictType === 'owned-by-other'\n ? 
`Namespacing ${normalized} (owned by ${ownerName}) due to --force.`\n : `Overwriting ${normalized} (content differs, --force active).`;\n return { decision: resolution, warning };\n }\n\n const strategy = options.conflictStrategy;\n\n // 3. Explicit non-'ask' strategy\n if (strategy && strategy !== 'ask') {\n const decision = strategy as ConflictResolution;\n let warning: string | undefined;\n if (decision === 'skip') {\n warning = conflictType === 'owned-by-other'\n ? `Skipping ${normalized} (owned by ${ownerName}) due to conflict strategy '${strategy}'.`\n : `Skipping ${normalized} (content differs) due to conflict strategy '${strategy}'.`;\n } else if (decision === 'overwrite' && conflictType === 'exists-unowned') {\n warning = `Overwriting ${normalized} (content differs) due to conflict strategy '${strategy}'.`;\n }\n return { decision, warning };\n }\n\n // 4a. Default for owned-by-other: always namespace (no prompt needed)\n if (conflictType === 'owned-by-other') {\n return { decision: 'namespace' };\n }\n\n // 4b. exists-unowned: prompt in interactive mode\n if (interactive) {\n const p = prompt ?? resolvePrompt();\n const decision = await promptContentDifferenceResolution(normalized, p, sourcePath);\n return { decision };\n }\n\n // 5. 
Non-interactive fallback for exists-unowned: skip\n const warning = `Skipping ${normalized} (content differs) \u2014 non-interactive mode.`;\n return { decision: 'skip', warning };\n}\n\n/**\n * Execute the namespace strategy for an `owned-by-other` conflict:\n * moves the existing file (owned by another package) into its own namespace\n * subdirectory and updates that package's workspace index entry.\n *\n * The incoming file's namespaced path is computed separately via\n * `generateNamespacedPath()` and is NOT written here \u2014 that is handled\n * by the flow executor after the target entries are rewritten.\n *\n * @param ownerNamespaceSlug The derived short slug for the owner package\n * (computed via deriveNamespaceSlug).\n */\nexport async function executeNamespace(\n cwd: string,\n targetRelPath: string,\n owner: WorkspaceConflictOwner,\n ownershipContext: OwnershipContext,\n flowToPattern: string | undefined,\n dryRun: boolean,\n ownerNamespaceSlug: string,\n indexWriteCollector?: IndexWriteCollector\n): Promise<{ ownerNamespacedPath: string; warning: string }> {\n const normalized = normalizePathForProcessing(targetRelPath);\n const ownerNamespacedPath = generateNamespacedPath(normalized, ownerNamespaceSlug, flowToPattern);\n\n if (dryRun) {\n return {\n ownerNamespacedPath,\n warning: `Would move ${normalized} (owned by ${owner.packageName}) \u2192 ${ownerNamespacedPath} to make room for incoming namespaced file.`\n };\n }\n\n const absTarget = join(cwd, normalized);\n const absNamespaced = join(cwd, ownerNamespacedPath);\n await ensureDir(dirname(absNamespaced));\n await fs.rename(absTarget, absNamespaced);\n\n await updateOwnerIndexAfterRename(\n cwd,\n owner,\n normalized,\n ownerNamespacedPath,\n ownershipContext.indexByPackage,\n indexWriteCollector\n );\n\n // Update in-memory ownership map so subsequent targets in this run see the move\n ownershipContext.expandedIndexes.installedPathOwners.delete(normalized);\n 
ownershipContext.expandedIndexes.installedPathOwners.set(\n normalizePathForProcessing(ownerNamespacedPath),\n owner\n );\n\n return {\n ownerNamespacedPath,\n warning: `Moved ${normalized} (owned by ${owner.packageName}) \u2192 ${ownerNamespacedPath} (namespaced).`\n };\n}\n\n/**\n * Resolve conflicts for a batch of target paths.\n *\n * ### Namespacing strategy (default)\n *\n * This function uses a **two-pass** approach for the `namespace` strategy:\n *\n * **Pass 1 \u2014 detection:**\n * Iterate all targets and classify each. If ANY non-merge-flow target has a\n * conflict (`owned-by-other` OR `exists-unowned`), mark the entire package\n * for bulk namespacing.\n *\n * **Pass 2 \u2014 application:**\n * - Rewrite ALL non-merge-flow target paths to their namespaced form so that\n * the executor writes every file for this package under\n * `<base>/<packageName>/<rest>`.\n * - For `owned-by-other` conflicts: also physically move the existing owner's\n * file into its own namespace and update the owner's workspace index.\n * - For `exists-unowned` conflicts: leave the existing unowned file in place\n * (it is the \"original\"); only the incoming file is namespaced (which is\n * already the case from the bulk rewrite).\n *\n * ### Non-namespace strategies\n * When the resolved strategy is `skip` or `overwrite` (set via\n * `--conflicts skip|overwrite` or per-path overrides), the original\n * per-file behaviour is preserved and no bulk namespacing occurs.\n *\n * @param cwd - Workspace root\n * @param targets - Pre-computed target entries (from computeTargetEntries)\n * @param ownershipContext - Ownership context built by buildOwnershipContext\n * @param options - Install options\n * @param installingPackageName - Name of the package being installed (used as namespace dir)\n * @param forceOverwrite - True when the package-level phase confirmed an overwrite\n */\nexport async function resolveConflictsForTargets(\n cwd: string,\n targets: TargetEntry[],\n 
ownershipContext: OwnershipContext,\n options: InstallOptions,\n installingPackageName: string,\n forceOverwrite: boolean = false,\n prompt?: PromptPort,\n canPrompt?: boolean,\n indexWriteCollector?: IndexWriteCollector,\n persistedNamespace?: string\n): Promise<ConflictResolutionResult> {\n const warnings: string[] = [];\n const claimedFiles: string[] = [];\n const interactive = canPrompt ?? options.interactive ?? false;\n const isDryRun = Boolean(options.dryRun);\n\n // -------------------------------------------------------------------------\n // Compute namespace slugs for all known packages\n // -------------------------------------------------------------------------\n\n // Build the set of slugs already claimed by other installed packages\n const otherPackageNames = Array.from(ownershipContext.indexByPackage.keys());\n const slugByPackageName = new Map<string, string>();\n const existingSlugs = new Set<string>();\n\n for (const name of otherPackageNames) {\n const slug = deriveNamespaceSlug(name, existingSlugs);\n slugByPackageName.set(name, slug);\n existingSlugs.add(slug);\n }\n\n // Check if the workspace index has a persisted namespace for this package\n // (from a previous --namespace install). This makes sync/reinstall re-apply\n // the same prefix automatically.\n let namespaceOption = options.namespace;\n if (namespaceOption === undefined) {\n if (persistedNamespace) {\n namespaceOption = persistedNamespace;\n } else {\n try {\n const wsRecord = await readWorkspaceIndex(cwd);\n const existingEntry = wsRecord.index.packages?.[installingPackageName];\n if (existingEntry?.namespace) {\n namespaceOption = existingEntry.namespace;\n }\n } catch {\n // Best-effort; if read fails, continue without persisted namespace\n }\n }\n }\n\n // Derive the installing package's slug (avoiding collisions with other packages)\n const installingSlug = typeof namespaceOption === 'string'\n ? 
namespaceOption\n : deriveNamespaceSlug(installingPackageName, existingSlugs);\n\n // -------------------------------------------------------------------------\n // --namespace / --no-namespace handling\n // -------------------------------------------------------------------------\n\n // --no-namespace: never namespace, skip directly to pass 2 with no bulk flag\n if (namespaceOption === false) {\n const allowedTargets: TargetEntry[] = [];\n const relocatedFiles: RelocatedFile[] = [];\n for (const target of targets) {\n const classification = classifyFileConflict(target.relPath, ownershipContext);\n if (classification.type === 'owned-by-other') {\n // Can't install both to the same path \u2014 skip with warning\n warnings.push(\n `Skipping ${target.relPath} (owned by ${classification.owner!.packageName}) \u2014 --no-namespace active.`\n );\n continue;\n }\n allowedTargets.push(target);\n }\n return {\n allowedTargets,\n warnings,\n packageWasNamespaced: false,\n relocatedFiles,\n claimedFiles\n };\n }\n\n // -------------------------------------------------------------------------\n // Pass 1: Classify all targets to determine whether bulk namespacing applies\n // -------------------------------------------------------------------------\n\n type Classification =\n | { type: 'none' }\n | { type: 'owned-by-other'; owner: WorkspaceConflictOwner;\n decision: ConflictResolution; warning?: string }\n | { type: 'exists-unowned';\n decision: ConflictResolution; warning?: string };\n\n const classifications: Classification[] = [];\n // --namespace (true or string): force bulk namespace, skip detection\n let shouldNamespacePackage = namespaceOption === true || typeof namespaceOption === 'string';\n\n for (const target of targets) {\n // Merge-flow targets are never namespaced\n if (target.isMergeFlow) {\n classifications.push({ type: 'none' });\n continue;\n }\n\n const classification = classifyFileConflict(target.relPath, ownershipContext);\n\n if (classification.type === 
'none') {\n classifications.push({ type: 'none' });\n continue;\n }\n\n if (classification.type === 'owned-by-other') {\n // Resolve once and store the decision for replay in Pass 2\n const { decision, warning } = await resolveFileConflict(\n 'owned-by-other',\n target.relPath,\n classification.owner!.packageName,\n undefined,\n options,\n interactive,\n forceOverwrite,\n prompt\n );\n\n classifications.push({ type: 'owned-by-other', owner: classification.owner!, decision, warning });\n\n if (decision === 'namespace') {\n shouldNamespacePackage = true;\n }\n continue;\n }\n\n // exists-unowned\n const absTarget = join(cwd, normalizePathForProcessing(target.relPath));\n const fileExists = await exists(absTarget);\n if (!fileExists) {\n classifications.push({ type: 'none' });\n continue;\n }\n\n // Check content difference \u2014 three-tier cascade:\n // 1. `content` (eager, set by caller)\n // 2. `resolveOutputContent()` (lazy, transformed output)\n // 3. `sourceAbsPath` read (lazy, raw source \u2014 pass-through fallback)\n let contentToCompare = target.content;\n if (contentToCompare === undefined && target.resolveOutputContent) {\n try {\n contentToCompare = await target.resolveOutputContent();\n } catch {\n // Callback failed \u2014 fall through to sourceAbsPath\n }\n }\n if (contentToCompare === undefined && target.sourceAbsPath) {\n try {\n contentToCompare = await readTextFile(target.sourceAbsPath, 'utf8');\n } catch {\n // Read failed \u2014 leave undefined, fall through to exists-unowned\n }\n }\n if (contentToCompare !== undefined) {\n const contentDiffers = await hasContentDifference(absTarget, contentToCompare);\n if (!contentDiffers) {\n claimedFiles.push(target.relPath);\n classifications.push({ type: 'none' });\n continue;\n }\n }\n\n // Resolve once and store the decision for replay in Pass 2\n const { decision, warning } = await resolveFileConflict(\n 'exists-unowned',\n target.relPath,\n undefined,\n undefined,\n options,\n interactive,\n 
forceOverwrite,\n prompt\n );\n\n classifications.push({ type: 'exists-unowned', decision, warning });\n\n if (decision === 'namespace') {\n shouldNamespacePackage = true;\n }\n }\n\n // -------------------------------------------------------------------------\n // Pass 2: Apply resolutions\n // -------------------------------------------------------------------------\n\n const allowedTargets: TargetEntry[] = [];\n const relocatedFiles: RelocatedFile[] = [];\n\n for (let i = 0; i < targets.length; i++) {\n const target = targets[i];\n const cls = classifications[i];\n\n // \u2500\u2500 No conflict (or merge-flow) \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\n if (cls.type === 'none') {\n if (shouldNamespacePackage && !target.isMergeFlow) {\n // Bulk namespacing: rewrite this non-conflicting file's path too\n const namespacedRel = generateNamespacedPath(\n target.relPath,\n installingSlug,\n target.flowToPattern\n );\n const namespacedAbs = join(cwd, namespacedRel);\n allowedTargets.push({ ...target, relPath: namespacedRel, absPath: namespacedAbs });\n } else {\n allowedTargets.push(target);\n }\n continue;\n }\n\n // \u2500\u2500 owned-by-other \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\n if (cls.type === 'owned-by-other') {\n const { decision, warning } = cls;\n if (warning) warnings.push(warning);\n\n if (decision === 'skip') continue;\n\n if (decision === 'namespace') {\n try {\n // Move the existing owner's file into its namespace\n const ownerSlug = slugByPackageName.get(cls.owner.packageName)\n ?? 
deriveNamespaceSlug(cls.owner.packageName, existingSlugs);\n const { ownerNamespacedPath, warning: nsWarn } = await executeNamespace(\n cwd,\n target.relPath,\n cls.owner,\n ownershipContext,\n target.flowToPattern,\n isDryRun,\n ownerSlug,\n indexWriteCollector\n );\n warnings.push(nsWarn);\n relocatedFiles.push({ from: normalizePathForProcessing(target.relPath), to: ownerNamespacedPath });\n\n // Rewrite this incoming target to its namespaced path\n const namespacedRel = generateNamespacedPath(\n target.relPath,\n installingSlug,\n target.flowToPattern\n );\n const namespacedAbs = join(cwd, namespacedRel);\n allowedTargets.push({ ...target, relPath: namespacedRel, absPath: namespacedAbs });\n } catch (error) {\n warnings.push(`Failed to namespace ${target.relPath}: ${error}`);\n // Do not add to allowedTargets on failure\n }\n continue;\n }\n\n // overwrite: allow flow to write the file; clear in-memory owner to avoid re-prompting\n if (!isDryRun) {\n ownershipContext.expandedIndexes.installedPathOwners.delete(\n normalizePathForProcessing(target.relPath)\n );\n }\n if (isDryRun) {\n warnings.push(`Would overwrite ${target.relPath} (currently owned by ${cls.owner.packageName}).`);\n }\n allowedTargets.push(target);\n continue;\n }\n\n // \u2500\u2500 exists-unowned \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\n if (cls.type === 'exists-unowned') {\n const { decision, warning } = cls;\n if (warning) warnings.push(warning);\n\n if (decision === 'skip') continue;\n\n if (decision === 'namespace') {\n // Leave the existing unowned file in place; only namespace the incoming file\n const namespacedRel = generateNamespacedPath(\n target.relPath,\n installingSlug,\n target.flowToPattern\n );\n const 
namespacedAbs = join(cwd, namespacedRel);\n if (isDryRun) {\n warnings.push(\n `Would install ${target.relPath} as ${namespacedRel} ` +\n `(existing unowned file kept at original path).`\n );\n } else {\n warnings.push(\n `Installed as ${namespacedRel} ` +\n `(existing unowned file kept at ${target.relPath}).`\n );\n }\n allowedTargets.push({ ...target, relPath: namespacedRel, absPath: namespacedAbs });\n continue;\n }\n\n // overwrite or dry-run\n if (isDryRun) {\n warnings.push(`Would overwrite existing local file ${target.relPath} (content differs).`);\n }\n allowedTargets.push(target);\n continue;\n }\n }\n\n return {\n allowedTargets,\n warnings,\n packageWasNamespaced: shouldNamespacePackage,\n namespaceDir: shouldNamespacePackage ? installingSlug : undefined,\n relocatedFiles,\n claimedFiles\n };\n}\n", "import { join, dirname } from 'path';\nimport * as yaml from 'js-yaml';\nimport { FILE_PATTERNS } from '../constants/index.js';\nimport { exists, readTextFile, writeTextFile, ensureDir } from './fs.js';\nimport { getLocalOpenPackageDir, getLocalPackageContentDir } from './paths.js';\nimport { normalizePathForProcessing } from './path-normalization.js';\nimport { logger } from './logger.js';\n\nconst HEADER_COMMENT = '# This file is managed by OpenPackage. 
Do not edit manually.';\n\nexport type PackageIndexLocation = 'root' | 'nested';\n\nexport interface PackageIndexWorkspace {\n hash?: string;\n version: string;\n}\n\nexport interface PackageIndexData {\n workspace: PackageIndexWorkspace;\n files: Record<string, string[]>;\n}\n\nexport interface PackageIndexRecord extends PackageIndexData {\n path: string;\n packageName: string;\n}\n\nexport function getPackageIndexPath(\n cwd: string,\n packageName: string,\n location: PackageIndexLocation = 'nested'\n): string {\n if (location === 'root') {\n return join(getLocalOpenPackageDir(cwd), FILE_PATTERNS.OPENPACKAGE_INDEX_YML);\n }\n\n // Nested: cached package root (cwd/.openpackage/packages/<name>/)\n const contentDir = getLocalPackageContentDir(cwd, packageName);\n return join(contentDir, FILE_PATTERNS.OPENPACKAGE_INDEX_YML);\n}\n\nexport function ensureTrailingSlash(value: string): string {\n return value.endsWith('/') ? value : `${value}/`;\n}\n\nexport function sortMapping(record: Record<string, any[]>): Record<string, any[]> {\n const sortedKeys = Object.keys(record).sort();\n const normalized: Record<string, any[]> = {};\n for (const key of sortedKeys) {\n const values = record[key] || [];\n const hasComplex = values.some(v => typeof v === 'object' && v !== null);\n if (hasComplex) {\n // Complex mappings - sort by target path\n const sorted = [...values].sort((a, b) => {\n const targetA = typeof a === 'string' ? a : a.target;\n const targetB = typeof b === 'string' ? b : b.target;\n return targetA.localeCompare(targetB);\n });\n // Dedupe by target\n const seen = new Set<string>();\n normalized[key] = sorted.filter(item => {\n const target = typeof item === 'string' ? 
item : item.target;\n if (seen.has(target)) return false;\n seen.add(target);\n return true;\n });\n } else {\n // Simple string array\n const sortedValues = [...new Set(values)].sort();\n normalized[key] = sortedValues;\n }\n }\n return normalized;\n}\n\nexport function sanitizeIndexData(data: any): PackageIndexData | null {\n if (!data || typeof data !== 'object') return null;\n\n let workspaceVer: string | undefined;\n let workspaceHash: string | undefined;\n\n const workspaceSection = (data as { workspace?: unknown }).workspace;\n if (workspaceSection && typeof workspaceSection === 'object') {\n const maybeVersion = (workspaceSection as { version?: unknown }).version;\n if (typeof maybeVersion === 'string') {\n workspaceVer = maybeVersion;\n }\n const maybeHash = (workspaceSection as { hash?: unknown }).hash;\n if (typeof maybeHash === 'string') {\n workspaceHash = maybeHash;\n }\n }\n\n if (typeof workspaceVer !== 'string') return null;\n\n const files = (data as { files?: unknown }).files;\n if (!files || typeof files !== 'object') return null;\n\n const entries: Record<string, string[]> = {};\n for (const [rawKey, rawValue] of Object.entries(files as Record<string, unknown>)) {\n if (typeof rawKey !== 'string') continue;\n if (!Array.isArray(rawValue)) continue;\n\n const cleanedValues = rawValue\n .filter((entry): entry is string => typeof entry === 'string' && entry.trim().length > 0)\n .map(value => normalizePathForProcessing(value));\n\n entries[normalizePathForProcessing(rawKey)] = cleanedValues;\n }\n\n return {\n workspace: {\n version: workspaceVer,\n hash: workspaceHash\n },\n files: sortMapping(entries)\n };\n}\n\nexport async function readPackageIndex(\n cwd: string,\n packageName: string,\n location: PackageIndexLocation = 'nested'\n): Promise<PackageIndexRecord | null> {\n const canonicalPath = getPackageIndexPath(cwd, packageName, location);\n const indexPath = canonicalPath;\n\n if (!(await exists(indexPath))) {\n return null;\n }\n\n try {\n 
const content = await readTextFile(indexPath);\n const parsed = yaml.load(content) as any;\n const sanitized = sanitizeIndexData(parsed);\n if (!sanitized) {\n logger.warn(`Invalid package index detected at ${indexPath}, will repair on write.`);\n return {\n path: indexPath,\n packageName,\n workspace: { version: '', hash: undefined },\n files: {}\n };\n }\n return {\n path: canonicalPath,\n packageName,\n workspace: sanitized.workspace,\n files: sanitized.files\n };\n } catch (error) {\n logger.warn(`Failed to read package index at ${indexPath}: ${error}`);\n return {\n path: canonicalPath,\n packageName,\n workspace: { version: '', hash: undefined },\n files: {}\n };\n }\n}\n\nexport async function writePackageIndex(record: PackageIndexRecord): Promise<void> {\n const { path: indexPath, files } = record;\n const workspaceVer = record.workspace?.version;\n if (!workspaceVer) {\n throw new Error(`workspace.version is required when writing ${FILE_PATTERNS.OPENPACKAGE_INDEX_YML}`);\n }\n const workspace: PackageIndexWorkspace = {\n hash: record.workspace?.hash,\n version: workspaceVer\n };\n await ensureDir(dirname(indexPath));\n\n const normalizedFiles = sortMapping(files);\n const body = yaml.dump(\n {\n workspace,\n files: normalizedFiles\n },\n {\n lineWidth: 120,\n sortKeys: true\n }\n );\n\n const serialized = `${HEADER_COMMENT}\\n\\n${body}`;\n await writeTextFile(indexPath, serialized);\n}\n\nexport function isDirKey(key: string): boolean {\n return key.endsWith('/');\n}\n\n/**\n * Prune nested child directories if their parent directory is already present.\n * Example: keep \"skills/nestjs/\" and drop \"skills/nestjs/examples/\".\n */\nexport function pruneNestedDirectories(dirs: string[]): string[] {\n const sorted = [...dirs].sort((a, b) => {\n if (a.length === b.length) {\n return a.localeCompare(b);\n }\n return a.length - b.length;\n });\n\n const pruned: string[] = [];\n for (const dir of sorted) {\n const hasParent = pruned.some(parent => dir !== 
parent && dir.startsWith(parent));\n if (!hasParent) {\n pruned.push(dir);\n }\n }\n return pruned;\n}\n\n", "/**\n * Prefix-Based Resource Namespace Path Generation\n *\n * Generates namespaced paths by prepending `slug-` to the leaf segment\n * (file or marker-bounded directory) rather than inserting a subdirectory.\n *\n * This approach ensures flat name discovery (e.g. Claude Code's leaf-name\n * identity) can distinguish resources from different packages.\n */\n\nimport { stripExtension } from '../../resources/resource-naming.js';\nimport { getInstallableTypes } from '../../resources/resource-registry.js';\n\n// Module-level constant: marker filenames derived from the resource registry.\nconst MARKER_FILENAMES: Set<string> = new Set(\n getInstallableTypes().map(d => d.marker).filter(Boolean) as string[]\n);\n\n/**\n * Dedup rule: returns true when prefixing would be redundant because the\n * leaf name (sans extension) already equals the slug.\n *\n * Example: slug=\"code-review\", leaf=\"code-review.md\" \u2192 skip prefix.\n */\nexport function shouldSkipPrefix(leafName: string, slug: string): boolean {\n return stripExtension(leafName) === slug;\n}\n\n/**\n * Derive the base directory from a flow `to` pattern \u2014 the longest\n * non-glob prefix directory. Mirrors the algorithm previously in\n * `generateNamespacedPath`.\n */\nfunction deriveBaseDir(flowToPattern: string | undefined): string {\n if (!flowToPattern) return '';\n\n const patternNorm = flowToPattern.replace(/\\\\/g, '/');\n const firstGlob = patternNorm.search(/[*?{]/);\n\n if (firstGlob > 0) {\n const prefix = patternNorm.slice(0, firstGlob).replace(/\\/$/, '');\n if (patternNorm[firstGlob - 1]?.match(/\\//)) {\n return prefix;\n }\n return prefix.includes('/') ? prefix.slice(0, prefix.lastIndexOf('/')) : '';\n }\n\n if (firstGlob === -1) {\n // Literal pattern \u2014 base is the directory of the literal target\n const lastSlash = patternNorm.lastIndexOf('/');\n return lastSlash >= 0 ? 
patternNorm.slice(0, lastSlash) : '';\n }\n\n return '';\n}\n\n/**\n * Generate a prefix-based namespaced leaf path.\n *\n * Instead of inserting a subdirectory (`rules/acme/foo.md`), this prepends\n * `slug-` to the leaf: `rules/acme-foo.md`.\n *\n * For marker-based resources (e.g. skills with SKILL.md), the parent\n * directory is prefixed instead of the file:\n * `commands/review/SKILL.md` \u2192 `commands/pkg-a-review/SKILL.md`\n *\n * Dedup rule: if the leaf name (sans extension) already equals the slug,\n * the path is returned unchanged.\n *\n * @param relPath Workspace-relative path (forward-slash normalised)\n * @param slug Short namespace slug (e.g. \"acme\", \"my-pkg\")\n * @param flowToPattern The resolved flow `to` pattern (used to derive base dir)\n */\nexport function generatePrefixedLeafPath(\n relPath: string,\n slug: string,\n flowToPattern: string | undefined\n): string {\n const normalized = relPath.replace(/\\\\/g, '/');\n\n // Derive base directory from flow pattern\n const baseDir = deriveBaseDir(flowToPattern);\n\n // Split path into base + remainder\n let remainder: string;\n if (baseDir) {\n const baseDirSlash = baseDir.endsWith('/') ? baseDir : `${baseDir}/`;\n if (normalized.startsWith(baseDirSlash)) {\n remainder = normalized.slice(baseDirSlash.length);\n } else if (normalized === baseDir) {\n // Edge case: path equals base dir exactly\n return `${baseDir}`;\n } else {\n // Fallback if base doesn't match\n remainder = normalized;\n }\n } else {\n remainder = normalized;\n }\n\n const parts = remainder.split('/');\n const leaf = parts[parts.length - 1];\n\n // Check if the leaf is a marker file (e.g. SKILL.md)\n if (MARKER_FILENAMES.has(leaf) && parts.length >= 2) {\n // Marker-based resource: prefix the parent directory\n // e.g. 
parts = [\"review\", \"SKILL.md\"] \u2192 [\"slug-review\", \"SKILL.md\"]\n // or parts = [\"review\", \"agents\", \"x.md\"] \u2192 [\"slug-review\", \"agents\", \"x.md\"]\n // Find the marker boundary \u2014 the directory just before the marker file\n // For nested files under the skill dir, the boundary is the first segment\n // that represents the skill directory name.\n const parentIdx = 0; // The skill directory is always the first segment under the base\n const parentDir = parts[parentIdx];\n\n if (shouldSkipPrefix(parentDir, slug)) {\n return baseDir ? `${baseDir}/${remainder}` : remainder;\n }\n\n parts[parentIdx] = `${slug}-${parentDir}`;\n const newRemainder = parts.join('/');\n return baseDir ? `${baseDir}/${newRemainder}` : newRemainder;\n }\n\n // File-based prefixing: prepend slug to leaf filename\n if (shouldSkipPrefix(leaf, slug)) {\n return baseDir ? `${baseDir}/${remainder}` : remainder;\n }\n\n if (parts.length === 1) {\n // Single-segment remainder (just a filename)\n return baseDir ? `${baseDir}/${slug}-${leaf}` : `${slug}-${leaf}`;\n }\n\n // Multi-segment: prefix the leaf (last segment)\n const parentParts = parts.slice(0, -1);\n const prefixedLeaf = `${slug}-${leaf}`;\n const newRemainder = [...parentParts, prefixedLeaf].join('/');\n return baseDir ? 
`${baseDir}/${newRemainder}` : newRemainder;\n}\n", "import { dirname, join, relative, sep } from 'path';\n\nimport {\n exists,\n ensureDir,\n listDirectories,\n listFiles,\n remove,\n removeEmptyDirectories,\n walkFiles,\n} from '../../utils/fs.js';\nimport { packageManager } from '../package.js';\nimport { getRegistryDirectories } from '../directory.js';\nimport { logger } from '../../utils/logger.js';\nimport {\n FILE_PATTERNS,\n} from '../../constants/index.js';\nimport { getPlatformRootFileNames } from '../platform/platform-root-files.js';\nimport type { Platform } from '../platforms.js';\nimport { getPlatformsState } from '../platforms.js';\nimport { normalizePathForProcessing } from '../../utils/path-normalization.js';\nimport { formatPathForYaml } from '../../utils/path-resolution.js';\nimport {\n isAllowedRegistryPath,\n isRootRegistryPath,\n isSkippableRegistryPath,\n normalizeRegistryPath,\n extractUniversalSubdirInfo\n} from '../platform/registry-entry-filter.js';\nimport { mapUniversalToPlatform } from '../platform/platform-mapper.js';\nimport type { PackageFile } from '../../types/index.js';\nimport { mergeInlinePlatformOverride } from '../platform-yaml-merge.js';\nimport { parseUniversalPath } from '../platform/platform-file.js';\nimport { getPlatformDefinition, deriveRootDirFromFlows } from '../platforms.js';\nimport {\n sortMapping,\n ensureTrailingSlash,\n isDirKey,\n pruneNestedDirectories\n} from '../../utils/package-index-yml.js';\nimport {\n getWorkspaceIndexPath,\n readWorkspaceIndex,\n type WorkspaceIndexRecord,\n} from '../../utils/workspace-index-yml.js';\nimport {\n type WorkspaceConflictOwner\n} from '../../utils/workspace-index-ownership.js';\nimport { getTargetPath } from '../../utils/workspace-index-helpers.js';\nimport type { WorkspaceIndexFileMapping } from '../../types/workspace-index.js';\n\n// ============================================================================\n// Types and Interfaces\n// 
============================================================================\n\ntype PackageIndexLocation = 'root' | 'nested';\n\ninterface PackageIndexRecord {\n path: string;\n packageName: string;\n workspace: {\n version: string;\n hash?: string;\n };\n files: Record<string, (string | WorkspaceIndexFileMapping)[]>;\n}\n\ninterface RegistryFileEntry {\n registryPath: string;\n content: string;\n encoding?: string;\n}\n\ninterface PlannedTarget {\n absPath: string;\n relPath: string;\n platform?: Platform | 'other';\n}\n\ninterface PlannedFile {\n registryPath: string;\n content: string;\n encoding?: string;\n targets: PlannedTarget[];\n}\n\ninterface GroupPlan {\n key: string;\n plannedFiles: PlannedFile[];\n decision: 'dir' | 'file';\n platformDecisions: Map<Platform | 'other', 'dir' | 'file'>;\n targetDirs: Set<string>;\n}\n\ntype ConflictOwner = WorkspaceConflictOwner;\n\nexport interface ExpandedIndexesContext {\n dirKeyOwners: Map<string, ConflictOwner[]>;\n installedPathOwners: Map<string, ConflictOwner>;\n}\n\ninterface PlannedTargetDetail {\n absPath: string;\n relPath: string;\n content: string;\n encoding?: string;\n sourcePath?: string;\n}\n\n// ============================================================================\n// Path and File Utilities\n// ============================================================================\n\nfunction normalizeRelativePath(cwd: string, absPath: string): string {\n const rel = relative(cwd, absPath);\n const normalized = normalizePathForProcessing(rel);\n return normalized.replace(/\\\\/g, '/');\n}\n\nexport function loadOtherPackageIndexesFromRecord(\n record: WorkspaceIndexRecord,\n excludePackage: string\n): PackageIndexRecord[] {\n const wsPath = record.path;\n const packages = record.index.packages ?? {};\n const results: PackageIndexRecord[] = [];\n\n for (const [name, entry] of Object.entries(packages)) {\n if (name === excludePackage) continue;\n results.push({\n path: entry?.path ?? 
wsPath,\n packageName: name,\n workspace: { version: entry?.version ?? '' },\n files: entry?.files ?? {}\n });\n }\n\n return results;\n}\n\nexport async function loadOtherPackageIndexes(\n cwd: string,\n excludePackage: string\n): Promise<PackageIndexRecord[]> {\n const record = await readWorkspaceIndex(cwd);\n return loadOtherPackageIndexesFromRecord(record, excludePackage);\n}\n\nasync function collectFilesUnderDirectory(cwd: string, dirRelPath: string): Promise<string[]> {\n const directoryRel = ensureTrailingSlash(normalizePathForProcessing(dirRelPath));\n const absDir = join(cwd, directoryRel);\n if (!(await exists(absDir))) {\n return [];\n }\n\n const collected: string[] = [];\n try {\n for await (const absFile of walkFiles(absDir)) {\n const relPath = normalizeRelativePath(cwd, absFile);\n collected.push(relPath);\n }\n } catch (error) {\n logger.warn(`Failed to enumerate directory ${absDir}: ${error}`);\n }\n return collected;\n}\n\nexport async function buildExpandedIndexesContext(\n cwd: string,\n indexes: PackageIndexRecord[]\n): Promise<ExpandedIndexesContext> {\n const dirKeyOwners = new Map<string, ConflictOwner[]>();\n const installedPathOwners = new Map<string, ConflictOwner>();\n\n for (const record of indexes) {\n for (const [rawKey, values] of Object.entries(record.files)) {\n const key = normalizePathForProcessing(rawKey);\n const owner: ConflictOwner = {\n packageName: record.packageName,\n key,\n type: key.endsWith('/') ? 
'dir' : 'file'\n };\n\n if (owner.type === 'dir') {\n if (!dirKeyOwners.has(key)) {\n dirKeyOwners.set(key, []);\n }\n dirKeyOwners.get(key)!.push(owner);\n\n for (const mapping of values) {\n const dirRel = getTargetPath(mapping);\n const files = await collectFilesUnderDirectory(cwd, dirRel);\n for (const filePath of files) {\n if (!installedPathOwners.has(filePath)) {\n installedPathOwners.set(filePath, owner);\n }\n }\n }\n } else {\n for (const mapping of values) {\n const fileRel = getTargetPath(mapping);\n const normalizedValue = normalizePathForProcessing(fileRel);\n if (!installedPathOwners.has(normalizedValue)) {\n installedPathOwners.set(normalizedValue, owner);\n }\n }\n }\n }\n }\n\n return { dirKeyOwners, installedPathOwners };\n}\n\n// ============================================================================\n// Group Planning Functions\n// ============================================================================\n\nfunction deriveGroupKey(registryPath: string, cwd?: string): string {\n const normalized = normalizeRegistryPath(registryPath);\n const segments = normalized.split('/');\n if (segments.length <= 1) {\n return '';\n }\n\n const first = segments[0];\n const state = getPlatformsState(cwd ?? 
null);\n const universalSubdirs = state.universalSubdirs;\n\n if (universalSubdirs.has(first)) {\n if (segments.length >= 2) {\n return ensureTrailingSlash(`${segments[0]}/${segments[1]}`);\n }\n return ensureTrailingSlash(`${segments[0]}`);\n }\n\n const lastSlash = normalized.lastIndexOf('/');\n if (lastSlash === -1) return '';\n return ensureTrailingSlash(normalized.substring(0, lastSlash));\n}\n\nfunction createPlannedFiles(entries: RegistryFileEntry[]): PlannedFile[] {\n return entries.map(entry => ({\n registryPath: entry.registryPath,\n content: entry.content,\n encoding: entry.encoding,\n targets: []\n }));\n}\n\nfunction groupPlannedFiles(plannedFiles: PlannedFile[], cwd?: string): Map<string, PlannedFile[]> {\n const groups = new Map<string, PlannedFile[]>();\n for (const planned of plannedFiles) {\n const key = deriveGroupKey(planned.registryPath, cwd);\n if (!groups.has(key)) {\n groups.set(key, []);\n }\n groups.get(key)!.push(planned);\n }\n return groups;\n}\n\n// ============================================================================\n// Index Mapping Building Functions\n// ============================================================================\n\nfunction refreshGroupTargetDirs(plan: GroupPlan): void {\n plan.targetDirs = collectTargetDirectories(plan.plannedFiles);\n}\n\nfunction buildFallbackFileMappings(plan: GroupPlan): Record<string, string[]> {\n const mapping: Record<string, string[]> = {};\n\n for (const file of plan.plannedFiles) {\n if (file.targets.length === 0) continue;\n const values = Array.from(\n new Set(\n file.targets.map(target => normalizePathForProcessing(target.relPath))\n )\n ).sort();\n mapping[normalizeRegistryPath(file.registryPath)] = values;\n }\n\n return mapping;\n}\n\nfunction buildIndexMappingFromPlans(plans: GroupPlan[]): Record<string, string[]> {\n const mapping: Record<string, string[]> = {};\n\n for (const plan of plans) {\n refreshGroupTargetDirs(plan);\n const fileMappings = 
buildFallbackFileMappings(plan);\n for (const [registryPath, values] of Object.entries(fileMappings)) {\n const existing = mapping[registryPath] ?? [];\n mapping[registryPath] = Array.from(new Set([...existing, ...values]));\n }\n }\n\n return sortMapping(mapping);\n}\n\n// ============================================================================\n// Target Mapping Functions\n// ============================================================================\n\nfunction mapRegistryPathToTargets(\n cwd: string,\n registryPath: string,\n platforms: Platform[]\n): PlannedTarget[] {\n const normalized = normalizeRegistryPath(registryPath);\n const targets: PlannedTarget[] = [];\n\n const universalInfo = extractUniversalSubdirInfo(normalized, cwd);\n\n if (universalInfo) {\n const parsed = parseUniversalPath(normalized);\n\n if (parsed?.platformSuffix) {\n const targetPlatform = parsed.platformSuffix as Platform;\n if (platforms.includes(targetPlatform)) {\n try {\n const mapped = mapUniversalToPlatform(\n targetPlatform,\n parsed.universalSubdir,\n parsed.relPath,\n cwd\n );\n const targetAbs = join(cwd, mapped.relFile);\n targets.push({\n absPath: targetAbs,\n relPath: normalizeRelativePath(cwd, targetAbs),\n platform: targetPlatform\n });\n } catch (error) {\n logger.debug(`Platform ${targetPlatform} does not support ${normalized}: ${error}`);\n }\n }\n return targets;\n }\n\n const rel = parsed ? 
parsed.relPath : universalInfo.relPath;\n for (const platform of platforms) {\n try {\n const mapped = mapUniversalToPlatform(platform, universalInfo.universalSubdir, rel, cwd);\n const targetAbs = join(cwd, mapped.relFile);\n targets.push({\n absPath: targetAbs,\n relPath: normalizeRelativePath(cwd, targetAbs),\n platform\n });\n } catch (error) {\n logger.debug(`Platform ${platform} does not support ${normalized}: ${error}`);\n }\n }\n return targets;\n }\n\n const fallbackAbs = join(cwd, normalized);\n targets.push({\n absPath: fallbackAbs,\n relPath: normalizeRelativePath(cwd, fallbackAbs),\n platform: 'other'\n });\n return targets;\n}\n\nfunction attachTargetsToPlannedFiles(\n cwd: string,\n plannedFiles: PlannedFile[],\n platforms: Platform[]\n): void {\n const overriddenByBase = new Map<string, Set<Platform>>();\n for (const pf of plannedFiles) {\n const parsed = parseUniversalPath(pf.registryPath);\n if (parsed?.platformSuffix) {\n const baseKey = `${parsed.universalSubdir}/${parsed.relPath}`;\n if (!overriddenByBase.has(baseKey)) {\n overriddenByBase.set(baseKey, new Set());\n }\n overriddenByBase.get(baseKey)!.add(parsed.platformSuffix as Platform);\n }\n }\n\n for (const planned of plannedFiles) {\n const targets = mapRegistryPathToTargets(cwd, planned.registryPath, platforms);\n\n const parsed = parseUniversalPath(planned.registryPath);\n if (parsed && !parsed.platformSuffix) {\n const baseKey = `${parsed.universalSubdir}/${parsed.relPath}`;\n const excludedPlatforms = overriddenByBase.get(baseKey);\n if (excludedPlatforms && excludedPlatforms.size > 0) {\n planned.targets = targets.filter(t =>\n !(t.platform && t.platform !== 'other' && excludedPlatforms.has(t.platform as Platform))\n );\n } else {\n planned.targets = targets;\n }\n } else {\n planned.targets = targets;\n }\n }\n}\n\n// ============================================================================\n// Directory Collection Functions\n// 
============================================================================\n\nfunction collectTargetDirectories(plannedFiles: PlannedFile[]): Set<string> {\n const dirs = new Set<string>();\n for (const planned of plannedFiles) {\n for (const target of planned.targets) {\n const dirName = dirname(target.relPath);\n if (!dirName || dirName === '.') continue;\n dirs.add(ensureTrailingSlash(normalizePathForProcessing(dirName)));\n }\n }\n return dirs;\n}\n\nfunction collectTargetDirectoriesByPlatform(\n plannedFiles: PlannedFile[]\n): Map<Platform | 'other', Set<string>> {\n const dirsByPlatform = new Map<Platform | 'other', Set<string>>();\n \n for (const planned of plannedFiles) {\n for (const target of planned.targets) {\n const platform = target.platform ?? 'other';\n if (!dirsByPlatform.has(platform)) {\n dirsByPlatform.set(platform, new Set());\n }\n const dirName = dirname(target.relPath);\n if (!dirName || dirName === '.') continue;\n dirsByPlatform.get(platform)!.add(ensureTrailingSlash(normalizePathForProcessing(dirName)));\n }\n }\n \n return dirsByPlatform;\n}\n\nasync function directoryHasEntries(absDir: string): Promise<boolean> {\n if (!(await exists(absDir))) return false;\n const files = await listFiles(absDir).catch(() => [] as string[]);\n if (files.length > 0) return true;\n const subdirs = await listDirectories(absDir).catch(() => [] as string[]);\n return subdirs.length > 0;\n}\n\n// ============================================================================\n// Platform Decision Functions\n// ============================================================================\n\nasync function checkPlatformDirectoryOccupancy(\n cwd: string,\n platformDirs: Set<string>\n): Promise<boolean> {\n for (const dirRel of platformDirs) {\n const absDir = join(cwd, dirRel);\n if (await directoryHasEntries(absDir)) {\n return true;\n }\n }\n return false;\n}\n\nfunction hadPreviousDirForPlatform(\n previousIndex: PackageIndexRecord | null,\n groupKey: string,\n 
platform: Platform | 'other'\n): boolean {\n if (!previousIndex || platform === 'other') {\n return false;\n }\n\n const prevValues = previousIndex.files[groupKey] ?? [];\n if (prevValues.length === 0) {\n return false;\n }\n\n const platformDef = getPlatformDefinition(platform);\n const rootDir = normalizePathForProcessing(deriveRootDirFromFlows(platformDef));\n\n for (const mapping of prevValues) {\n const value = getTargetPath(mapping);\n const normalizedValue = normalizePathForProcessing(value);\n if (\n normalizedValue === rootDir ||\n normalizedValue.startsWith(`${rootDir}/`)\n ) {\n return true;\n }\n }\n\n return false;\n}\n\nasync function determinePlatformDecisions(\n cwd: string,\n targetDirsByPlatform: Map<Platform | 'other', Set<string>>,\n wasDirKey: boolean,\n previousIndex: PackageIndexRecord | null,\n groupKey: string\n): Promise<Map<Platform | 'other', 'dir' | 'file'>> {\n const platformDecisions = new Map<Platform | 'other', 'dir' | 'file'>();\n\n for (const [platform, platformDirs] of targetDirsByPlatform.entries()) {\n if (wasDirKey && hadPreviousDirForPlatform(previousIndex, groupKey, platform)) {\n platformDecisions.set(platform, 'dir');\n continue;\n }\n\n const directoryOccupied = await checkPlatformDirectoryOccupancy(cwd, platformDirs);\n platformDecisions.set(platform, directoryOccupied ? 'file' : 'dir');\n }\n\n return platformDecisions;\n}\n\nfunction computeOverallDecision(\n platformDecisions: Map<Platform | 'other', 'dir' | 'file'>\n): 'dir' | 'file' {\n const hasAnyDirDecision = Array.from(platformDecisions.values()).some(d => d === 'dir');\n return hasAnyDirDecision ? 'dir' : 'file';\n}\n\nasync function decideGroupPlans(\n cwd: string,\n groups: Map<string, PlannedFile[]>,\n previousIndex: PackageIndexRecord | null,\n context: ExpandedIndexesContext\n): Promise<GroupPlan[]> {\n const plans: GroupPlan[] = [];\n const previousDirKeys = new Set(\n previousIndex\n ? 
Object.keys(previousIndex.files).filter(key => isDirKey(key))\n : []\n );\n\n for (const [groupKey, plannedFiles] of groups.entries()) {\n const targetDirs = collectTargetDirectories(plannedFiles);\n const targetDirsByPlatform = collectTargetDirectoriesByPlatform(plannedFiles);\n let decision: 'dir' | 'file' = 'file';\n const platformDecisions = new Map<Platform | 'other', 'dir' | 'file'>();\n\n const otherDirOwners = context.dirKeyOwners.get(groupKey) ?? [];\n const hasTargets = plannedFiles.some(file => file.targets.length > 0);\n\n if (groupKey !== '' && hasTargets && otherDirOwners.length === 0) {\n const wasDirKey = previousDirKeys.has(groupKey);\n const computedDecisions = await determinePlatformDecisions(\n cwd,\n targetDirsByPlatform,\n wasDirKey,\n previousIndex,\n groupKey\n );\n platformDecisions.clear();\n computedDecisions.forEach((value, key) => platformDecisions.set(key, value));\n decision = computeOverallDecision(platformDecisions);\n }\n\n plans.push({\n key: groupKey,\n plannedFiles,\n decision,\n platformDecisions,\n targetDirs\n });\n }\n\n return plans;\n}\n\n// ============================================================================\n// Shared Helper for Building Index Mappings\n// ============================================================================\n\nfunction addMappingValue(mapping: Record<string, string[]>, key: string, value: string): void {\n if (!mapping[key]) {\n mapping[key] = [];\n }\n if (!mapping[key]!.includes(value)) {\n mapping[key]!.push(value);\n }\n}\n\nasync function augmentIndexMappingWithRootAndCopyToRoot(\n cwd: string,\n mapping: Record<string, string[]>,\n packageFiles: PackageFile[],\n platforms: Platform[]\n): Promise<Record<string, string[]>> {\n const augmented: Record<string, string[]> = { ...mapping };\n\n const rootFileNames = getPlatformRootFileNames(platforms);\n const explicitRootKeys = new Set<string>();\n const hasAgents = packageFiles.some(file => normalizeRegistryPath(file.path) === 
FILE_PATTERNS.AGENTS_MD);\n\n for (const file of packageFiles) {\n const normalized = normalizeRegistryPath(file.path);\n\n if (rootFileNames.has(normalized) || isRootRegistryPath(normalized)) {\n explicitRootKeys.add(normalized);\n if (await exists(join(cwd, normalized))) {\n addMappingValue(augmented, normalized, normalized);\n }\n }\n }\n\n for (const file of packageFiles) {\n const normalized = normalizeRegistryPath(file.path);\n if (!isAllowedRegistryPath(normalized, cwd)) continue;\n if (isSkippableRegistryPath(normalized, cwd)) continue;\n if (await exists(join(cwd, normalized))) {\n addMappingValue(augmented, normalized, normalized);\n }\n }\n\n if (hasAgents) {\n for (const rootFile of rootFileNames) {\n if (rootFile === FILE_PATTERNS.AGENTS_MD) continue;\n if (explicitRootKeys.has(rootFile)) continue;\n if (await exists(join(cwd, rootFile))) {\n addMappingValue(augmented, FILE_PATTERNS.AGENTS_MD, rootFile);\n }\n }\n }\n\n return sortMapping(augmented);\n}\n\n/**\n * Build index mapping for package files using the same logic flow as installPackageByIndex\n * This function reuses the planning, grouping, and decision logic to ensure consistency\n * between installation and sync operations.\n * \n * @param cwd - Current working directory\n * @param packageFiles - Array of package files to build mapping for\n * @param platforms - Platforms to map files to\n * @param previousIndex - Previous index record (if any)\n * @param otherIndexes - Other package indexes for conflict detection\n * @returns Record mapping registry paths to installed paths\n */\nexport async function buildIndexMappingForPackageFiles(\n cwd: string,\n packageFiles: PackageFile[],\n platforms: Platform[],\n previousIndex: PackageIndexRecord | null,\n otherIndexes: PackageIndexRecord[]\n): Promise<Record<string, string[]>> {\n const registryEntries: RegistryFileEntry[] = packageFiles\n .filter(file => {\n const normalized = normalizeRegistryPath(file.path);\n if 
(isRootRegistryPath(normalized)) return false;\n if (isSkippableRegistryPath(normalized, cwd)) return false;\n return isAllowedRegistryPath(normalized, cwd);\n })\n .map(file => ({\n registryPath: normalizeRegistryPath(file.path),\n content: file.content,\n encoding: file.encoding as string | undefined\n }));\n\n if (registryEntries.length === 0) {\n return await augmentIndexMappingWithRootAndCopyToRoot(cwd, {}, packageFiles, platforms);\n }\n\n const plannedFiles = createPlannedFiles(registryEntries);\n attachTargetsToPlannedFiles(cwd, plannedFiles, platforms);\n \n const groups = groupPlannedFiles(plannedFiles, cwd);\n const context = await buildExpandedIndexesContext(cwd, otherIndexes);\n const groupPlans = await decideGroupPlans(cwd, groups, previousIndex, context);\n \n const mapping = buildIndexMappingFromPlans(groupPlans);\n return await augmentIndexMappingWithRootAndCopyToRoot(cwd, mapping, packageFiles, platforms);\n}\n", "import { getInstallableTypes } from '../resources/resource-registry.js';\n\nexport function normalizeConvertedMatchedPattern(\n matchedPattern: string | undefined\n): string | undefined {\n if (!matchedPattern) {\n return matchedPattern;\n }\n\n const normalized = matchedPattern.replace(/\\\\/g, '/').replace(/^\\.\\/?/, '');\n const hasRecursiveSuffix = normalized.endsWith('/**');\n const patternBase = hasRecursiveSuffix ? normalized.slice(0, -3) : normalized;\n const segments = patternBase.split('/').filter(Boolean);\n\n for (const type of getInstallableTypes()) {\n if (!type.dirName) {\n continue;\n }\n\n const anchorIndex = segments.lastIndexOf(type.dirName);\n if (anchorIndex === -1) {\n continue;\n }\n\n const anchoredPath = segments.slice(anchorIndex).join('/');\n return hasRecursiveSuffix ? 
`${anchoredPath}/**` : anchoredPath;\n }\n\n return normalized;\n}\n", "/**\n * Strategy Selector Module\n * \n * Selects the appropriate installation strategy based on package format and target platform.\n */\n\nimport type { InstallOptions } from '../../../types/index.js';\nimport type { FlowInstallContext, InstallationStrategy } from './types.js';\nimport { ConversionInstallStrategy } from './conversion-strategy.js';\nimport { FlowBasedInstallStrategy } from './flow-based-strategy.js';\nimport { needsConversion } from '../format-detector.js';\n\n/**\n * Select the appropriate installation strategy based on package format and platform\n * \n * Strategy selection:\n * 1. ConversionInstallStrategy - Cross-platform conversion required (source \u2260 target)\n * 2. FlowBasedInstallStrategy - Default for all other cases (universal or same-platform)\n * \n * @param context - Installation context with package metadata\n * @param options - Installation options\n * @returns Selected installation strategy\n */\nexport function selectInstallStrategy(\n context: FlowInstallContext,\n options?: InstallOptions\n): InstallationStrategy {\n const format = context.packageFormat;\n const platform = context.platform;\n \n // If no format provided, default to flow-based strategy\n if (!format) {\n return new FlowBasedInstallStrategy();\n }\n \n // Check if conversion is needed\n if (needsConversion(format, platform)) {\n return new ConversionInstallStrategy();\n }\n \n // Default: flow-based strategy\n return new FlowBasedInstallStrategy();\n}\n", "/**\n * Format Detection Helpers\n * \n * Shared utilities for detecting package formats from directories.\n * Extracts common logic used by both flow-based-installer and flow-index-installer.\n */\n\nimport { relative } from 'path';\nimport { walkFiles } from '../../../utils/file-walker.js';\nimport { \n detectPackageFormat, \n detectPackageFormatWithContext,\n type PackageFormat\n} from '../format-detector.js';\nimport type { 
PackageConversionContext } from '../../../types/conversion-context.js';\nimport { logger } from '../../../utils/logger.js';\n\n/**\n * Load file list from package directory for format detection\n * Excludes .git directories\n * \n * @param packageRoot - Root directory of the package\n * @returns Array of file paths with empty content strings\n */\nexport async function loadPackageFileList(\n packageRoot: string\n): Promise<Array<{ path: string; content: string }>> {\n const files: Array<{ path: string; content: string }> = [];\n \n try {\n for await (const fullPath of walkFiles(packageRoot)) {\n const relativePath = relative(packageRoot, fullPath);\n \n // Skip git metadata\n if (relativePath.startsWith('.git/') || relativePath === '.git') {\n continue;\n }\n \n files.push({\n path: relativePath,\n content: ''\n });\n }\n } catch (error) {\n logger.error('Failed to read package directory for format detection', { \n packageRoot, \n error \n });\n }\n \n return files;\n}\n\n/**\n * Detect package format from directory\n * \n * Convenience wrapper that loads file list and detects format.\n * \n * @param packageRoot - Root directory of the package\n * @returns Detected package format\n */\nexport async function detectFormatFromDirectory(\n packageRoot: string\n): Promise<PackageFormat> {\n const files = await loadPackageFileList(packageRoot);\n return detectPackageFormat(files);\n}\n\n/**\n * Detect package format with conversion context from directory\n * \n * Convenience wrapper that loads file list and detects format with context.\n * Use this when you need both format and conversion context for the installation pipeline.\n * \n * @param packageRoot - Root directory of the package\n * @returns Detected format and conversion context\n */\nexport async function detectFormatWithContextFromDirectory(\n packageRoot: string\n): Promise<{ format: PackageFormat; context: PackageConversionContext }> {\n const files = await loadPackageFileList(packageRoot);\n return 
detectPackageFormatWithContext(files);\n}\n", "/**\n * Flow-Based Installer Module\n * \n * Handles installation of package files using the declarative flow system.\n * Integrates with the existing install pipeline to execute flow transformations\n * for each package file, with multi-package composition and priority-based merging.\n * \n * This module now serves as a thin orchestration layer, delegating to specialized\n * strategy implementations for different installation scenarios.\n * \n * CORE LAYER: Pure flow execution logic\n * For workspace-integrated installation with index updates, see flow-index-installer.ts\n */\n\nimport type { InstallOptions } from '../../types/index.js';\nimport { platformUsesFlows } from '../platforms.js';\nimport { logger } from '../../utils/logger.js';\nimport {\n selectInstallStrategy,\n type FlowInstallContext,\n type FlowInstallResult,\n type FlowConflictReport,\n type FlowInstallError\n} from './strategies/index.js';\nimport { detectFormatFromDirectory } from './helpers/format-detection.js';\nimport { logInstallationResult } from './helpers/result-logging.js';\n// Re-export types for backward compatibility\nexport type {\n FlowInstallContext,\n FlowInstallResult,\n FlowConflictReport,\n FlowInstallError\n};\n\n// ============================================================================\n// Main Installation API\n// ============================================================================\n\n/**\n * Execute flows for a single package installation with format detection and conversion\n * \n * This is the main entry point for flow-based installation. It:\n * 1. Detects the package format (universal vs platform-specific)\n * 2. Selects the appropriate installation strategy\n * 3. 
Delegates to the strategy for execution\n * \n * @param installContext - Installation context with package metadata\n * @param options - Installation options (e.g., dryRun)\n * @returns Installation result with file mappings and metrics\n */\nexport async function installPackageWithFlows(\n installContext: FlowInstallContext,\n options?: InstallOptions,\n forceOverwrite: boolean = false\n): Promise<FlowInstallResult> {\n const {\n packageName,\n packageRoot,\n workspaceRoot,\n platform,\n packageVersion,\n priority,\n dryRun\n } = installContext;\n \n const result: FlowInstallResult = {\n success: true,\n filesProcessed: 0,\n filesWritten: 0,\n conflicts: [],\n errors: [],\n targetPaths: [],\n fileMapping: {}\n };\n \n try {\n // Check if platform uses flows\n if (!platformUsesFlows(platform, workspaceRoot)) {\n logger.debug(`Platform ${platform} does not use flows, skipping flow-based installation`);\n return result;\n }\n \n // Phase 1: Get or detect package format\n const packageFormat = installContext.packageFormat || \n await detectFormatFromDirectory(packageRoot);\n \n logger.debug('Package format', {\n package: packageName,\n type: packageFormat.type,\n platform: packageFormat.platform,\n confidence: packageFormat.confidence,\n targetPlatform: platform,\n source: installContext.packageFormat ? 'provided' : 'detected'\n });\n \n // Phase 2: Select and execute installation strategy\n const enrichedContext: FlowInstallContext = {\n ...installContext,\n packageFormat\n };\n \n const strategy = selectInstallStrategy(enrichedContext, options);\n const strategyResult = await strategy.install(enrichedContext, options, forceOverwrite);\n \n // Log results using shared utility\n logInstallationResult(strategyResult, packageName, platform, dryRun ?? 
false);\n \n return strategyResult;\n \n } catch (error) {\n result.success = false;\n logger.error(`Failed to install package ${packageName} with flows: ${(error as Error).message}`);\n result.errors.push({\n flow: { from: packageRoot, to: workspaceRoot },\n sourcePath: packageRoot,\n error: error as Error,\n message: `Installation failed: ${(error as Error).message}`\n });\n return result;\n }\n}\n", "/**\n * Stale File Cleanup\n *\n * Detects and removes files that were part of a previous installation but are\n * no longer present in the updated package source. Runs after flow execution\n * (so the new file mapping is known) but before the workspace index is updated\n * (so the previous mapping is still readable).\n *\n * Reuses `removeFileMapping` from the uninstaller for all file removal logic.\n */\n\nimport { join } from 'path';\n\nimport { removeFileMapping } from '../uninstall/flow-aware-uninstaller.js';\nimport { cleanupEmptyParents } from '../../utils/cleanup-empty-parents.js';\nimport { buildPreservedDirectoriesSet } from '../platform/directory-preservation.js';\nimport { getPlatformRootFileNames } from '../platform/platform-root-files.js';\nimport { isDirKey } from '../../utils/package-index-yml.js';\nimport { getTargetPath } from '../../utils/workspace-index-helpers.js';\nimport { normalizePathForProcessing, getRelativePathFromBase } from '../../utils/path-normalization.js';\nimport { walkFiles } from '../../utils/fs.js';\nimport {\n loadOtherPackageIndexes,\n buildExpandedIndexesContext,\n} from './index-based-installer.js';\nimport {\n type OwnershipContext,\n} from './conflicts/file-conflict-resolver.js';\nimport { logger } from '../../utils/logger.js';\nimport type { Platform } from '../platforms.js';\nimport type { WorkspaceIndexFileMapping } from '../../types/workspace-index.js';\n\n// ============================================================================\n// Types\n// 
============================================================================\n\nexport interface StaleCleanupResult {\n /** Workspace-relative paths fully removed from disk */\n deleted: string[];\n /** Merge files that had keys removed but were not deleted */\n updated: string[];\n}\n\n// ============================================================================\n// Main\n// ============================================================================\n\nexport async function removeStaleFiles(options: {\n cwd: string;\n packageName: string;\n previousFiles: Record<string, (string | WorkspaceIndexFileMapping)[]>;\n newFileMapping: Record<string, (string | WorkspaceIndexFileMapping)[]>;\n platforms: Platform[];\n dryRun: boolean;\n matchedPattern?: string;\n ownershipContext?: OwnershipContext;\n}): Promise<StaleCleanupResult> {\n const {\n cwd,\n packageName,\n previousFiles,\n newFileMapping,\n platforms,\n dryRun,\n matchedPattern,\n ownershipContext,\n } = options;\n\n const deleted: string[] = [];\n const updated: string[] = [];\n\n if (dryRun) {\n return { deleted, updated };\n }\n\n // -------------------------------------------------------------------\n // 1. Filter for resource scope\n // -------------------------------------------------------------------\n let scopedPreviousFiles = previousFiles;\n\n if (matchedPattern) {\n // Extract the non-glob prefix from the matched pattern to determine scope\n const normalizedPattern = matchedPattern.replace(/\\\\/g, '/');\n const firstGlob = normalizedPattern.search(/[*?{[]/);\n const scopePrefix = firstGlob > 0\n ? normalizedPattern.slice(0, firstGlob)\n : firstGlob === -1\n ? 
normalizedPattern\n : '';\n\n if (scopePrefix) {\n scopedPreviousFiles = {};\n for (const [key, values] of Object.entries(previousFiles)) {\n const normalizedKey = key.replace(/\\\\/g, '/');\n if (normalizedKey.startsWith(scopePrefix) || scopePrefix.startsWith(normalizedKey)) {\n scopedPreviousFiles[key] = values;\n }\n }\n }\n }\n\n // -------------------------------------------------------------------\n // 2. Build previous target map\n // -------------------------------------------------------------------\n const rootFileNames = getPlatformRootFileNames(platforms, cwd);\n const previousTargetMap = new Map<string, string | WorkspaceIndexFileMapping>();\n\n for (const [sourceKey, mappings] of Object.entries(scopedPreviousFiles)) {\n // Skip root file keys (managed by root file system via delimiters)\n if (rootFileNames.has(sourceKey)) continue;\n\n if (isDirKey(sourceKey)) {\n // Directory key: expand to actual files on disk\n for (const mapping of mappings) {\n const dirRel = getTargetPath(mapping);\n const absDir = join(cwd, dirRel);\n try {\n for await (const absFile of walkFiles(absDir)) {\n const rel = getRelativePathFromBase(absFile, cwd);\n // Map each expanded file to the dir-level mapping\n previousTargetMap.set(rel, mapping);\n }\n } catch {\n // Directory may not exist \u2014 fine, no stale files from it\n }\n }\n } else {\n // File key: normalize target path\n for (const mapping of mappings) {\n const target = normalizePathForProcessing(getTargetPath(mapping));\n previousTargetMap.set(target, mapping);\n }\n }\n }\n\n // -------------------------------------------------------------------\n // 3. 
Build new target set\n // -------------------------------------------------------------------\n const newTargetSet = new Set<string>();\n\n for (const [sourceKey, mappings] of Object.entries(newFileMapping)) {\n if (isDirKey(sourceKey)) {\n for (const mapping of mappings) {\n const dirRel = getTargetPath(mapping);\n const absDir = join(cwd, dirRel);\n try {\n for await (const absFile of walkFiles(absDir)) {\n const rel = getRelativePathFromBase(absFile, cwd);\n newTargetSet.add(rel);\n }\n } catch {\n // Directory may not exist yet\n }\n }\n } else {\n for (const mapping of mappings) {\n newTargetSet.add(normalizePathForProcessing(getTargetPath(mapping)));\n }\n }\n }\n\n // -------------------------------------------------------------------\n // 4. Build or reuse ownership context\n // -------------------------------------------------------------------\n let installedPathOwners: Map<string, unknown>;\n\n if (ownershipContext) {\n installedPathOwners = ownershipContext.expandedIndexes.installedPathOwners;\n } else {\n try {\n const otherIndexes = await loadOtherPackageIndexes(cwd, packageName);\n const ctx = await buildExpandedIndexesContext(cwd, otherIndexes);\n installedPathOwners = ctx.installedPathOwners;\n } catch {\n installedPathOwners = new Map();\n }\n }\n\n // -------------------------------------------------------------------\n // 5. 
Compute and execute stale removals\n // -------------------------------------------------------------------\n const absoluteDeletedPaths: string[] = [];\n\n for (const [prevPath, prevMapping] of previousTargetMap) {\n if (newTargetSet.has(prevPath)) continue;\n\n // Skip if another package owns this path\n if (installedPathOwners.has(prevPath)) continue;\n\n try {\n const result = await removeFileMapping(cwd, prevMapping, packageName);\n\n if (result.removed.length > 0) {\n deleted.push(...result.removed);\n absoluteDeletedPaths.push(\n ...result.removed.map(rel => join(cwd, rel))\n );\n }\n\n if (result.updated.length > 0) {\n updated.push(...result.updated);\n }\n } catch (error) {\n logger.warn(\n `Failed to remove stale file ${prevPath} for ${packageName}: ${error}`\n );\n }\n }\n\n // -------------------------------------------------------------------\n // 6. Clean up empty directories\n // -------------------------------------------------------------------\n if (absoluteDeletedPaths.length > 0) {\n try {\n const preserved = buildPreservedDirectoriesSet(cwd);\n await cleanupEmptyParents(cwd, absoluteDeletedPaths, preserved);\n } catch (error) {\n logger.debug(`Empty directory cleanup failed: ${error}`);\n }\n }\n\n return { deleted, updated };\n}\n", "/**\n * Flow-Aware Uninstaller\n * \n * Handles uninstallation of packages installed with flows,\n * including precise removal of keys from merged files.\n */\n\nimport { join } from 'path';\nimport { readTextFile, writeTextFile, exists, remove } from '../../utils/fs.js';\nimport { logger } from '../../utils/logger.js';\nimport type { WorkspaceIndexFileMapping } from '../../types/workspace-index.js';\nimport { deleteNestedKey, isEffectivelyEmpty } from '../flows/flow-key-extractor.js';\nimport yaml from 'js-yaml';\nimport * as TOML from 'smol-toml';\n\n/**\n * File format detection\n */\ntype FileFormat = 'json' | 'jsonc' | 'yaml' | 'yml' | 'toml' | 'text';\n\nfunction detectFileFormat(filePath: string): 
FileFormat {\n const ext = filePath.toLowerCase().split('.').pop();\n switch (ext) {\n case 'json':\n return 'json';\n case 'jsonc':\n return 'jsonc';\n case 'yaml':\n case 'yml':\n return 'yaml';\n case 'toml':\n return 'toml';\n default:\n return 'text';\n }\n}\n\n/**\n * Parse file content based on format\n */\nfunction parseContent(content: string, format: FileFormat): any {\n try {\n switch (format) {\n case 'json':\n case 'jsonc':\n // Strip comments for JSONC\n const cleaned = format === 'jsonc' \n ? content.replace(/\\/\\/.*$/gm, '').replace(/\\/\\*[\\s\\S]*?\\*\\//g, '')\n : content;\n return JSON.parse(cleaned);\n \n case 'yaml':\n case 'yml':\n return yaml.load(content);\n \n case 'toml':\n return TOML.parse(content);\n \n default:\n return content;\n }\n } catch (error) {\n throw new Error(`Failed to parse ${format} file: ${error instanceof Error ? error.message : String(error)}`);\n }\n}\n\n/**\n * Serialize content based on format\n */\nfunction serializeContent(data: any, format: FileFormat): string {\n try {\n switch (format) {\n case 'json':\n case 'jsonc':\n return JSON.stringify(data, null, 2);\n \n case 'yaml':\n case 'yml':\n return yaml.dump(data, { indent: 2, flowLevel: 1, lineWidth: -1 });\n \n case 'toml':\n return TOML.stringify(data);\n \n default:\n return typeof data === 'string' ? data : JSON.stringify(data, null, 2);\n }\n } catch (error) {\n throw new Error(`Failed to serialize ${format} file: ${error instanceof Error ? 
error.message : String(error)}`);\n }\n}\n\n/**\n * Remove specific keys from a merged file\n * Deletes the file if it becomes empty after removal\n * \n * @param targetDir - Target directory (workspace root or global home)\n * @param targetPath - Relative path to target file\n * @param keysToRemove - Dot-notated keys to remove\n * @returns true if file was deleted, false if updated\n */\nexport async function removeKeysFromMergedFile(\n targetDir: string,\n targetPath: string,\n keysToRemove: string[]\n): Promise<{ deleted: boolean; updated: boolean }> {\n const absPath = join(targetDir, targetPath);\n\n if (!(await exists(absPath))) {\n return { deleted: false, updated: false };\n }\n\n // Load and parse file\n const content = await readTextFile(absPath);\n const format = detectFileFormat(targetPath);\n let data: any;\n\n try {\n data = parseContent(content, format);\n } catch (error) {\n logger.warn(`Failed to parse ${targetPath} for key removal: ${error instanceof Error ? error.message : String(error)}`);\n return { deleted: false, updated: false };\n }\n\n const hasNestedKeyPath = (obj: any, keyPath: string): boolean => {\n if (!obj || typeof obj !== 'object') return false;\n const parts = keyPath.split('.').filter(Boolean);\n let current: any = obj;\n for (const part of parts) {\n if (!current || typeof current !== 'object' || !(part in current)) return false;\n current = current[part];\n }\n return true;\n };\n\n const existingBefore = keysToRemove.filter(k => typeof k === 'string' && hasNestedKeyPath(data, k)).length;\n // Remove each key\n for (const key of keysToRemove) {\n deleteNestedKey(data, key);\n }\n\n // Check if file is now empty\n if (isEffectivelyEmpty(data)) {\n await remove(absPath);\n logger.info(`Removed empty file: ${targetPath}`);\n return { deleted: true, updated: false };\n }\n\n // Write back updated content\n const serialized = serializeContent(data, format);\n await writeTextFile(absPath, serialized);\n logger.info(`Updated 
${targetPath}: removed ${keysToRemove.length} keys`);\n return { deleted: false, updated: true };\n}\n\n/**\n * Remove a file mapping during uninstall\n * Handles both simple file removal and key-based removal from merged files\n * \n * @param targetDir - Target directory (workspace root or global home)\n * @param mapping - File mapping from workspace index\n * @param packageName - Package being uninstalled (for logging)\n * @returns Paths that were removed or updated\n */\nexport async function removeFileMapping(\n targetDir: string,\n mapping: string | WorkspaceIndexFileMapping,\n packageName: string\n): Promise<{ removed: string[]; updated: string[] }> {\n const removed: string[] = [];\n const updated: string[] = [];\n\n if (typeof mapping === 'string') {\n // Simple file mapping - delete entire file\n const absPath = join(targetDir, mapping);\n if (await exists(absPath)) {\n await remove(absPath);\n removed.push(mapping);\n logger.debug(`Removed file: ${mapping}`);\n }\n } else {\n // Complex mapping with potential key tracking\n const targetPath = mapping.target;\n\n if (mapping.merge === 'composite') {\n // Composite merge uses delimiters - handled by existing root file logic\n // This is already handled by applyRootFileRemovals\n logger.debug(`Skipping composite merge file (handled by root file logic): ${targetPath}`);\n } else if (mapping.keys && mapping.keys.length > 0) {\n // Remove specific keys from merged file\n const result = await removeKeysFromMergedFile(targetDir, targetPath, mapping.keys);\n \n if (result.deleted) {\n removed.push(targetPath);\n } else if (result.updated) {\n updated.push(targetPath);\n }\n\n logger.debug(\n `Removed ${mapping.keys.length} keys from ${targetPath}: ${mapping.keys.join(', ')}`\n );\n } else if (mapping.merge === 'deep' || mapping.merge === 'shallow') {\n // Merged file but no key tracking - this shouldn't happen with new installs\n logger.warn(\n `Cannot precisely remove ${targetPath} for ${packageName} - no key 
tracking available. ` +\n `File may contain content from other packages.`\n );\n // Don't delete - safer to leave it\n } else {\n // merge: 'replace' or no merge - delete entire file\n const absPath = join(targetDir, targetPath);\n if (await exists(absPath)) {\n await remove(absPath);\n removed.push(targetPath);\n logger.debug(`Removed file: ${targetPath}`);\n }\n }\n }\n\n return { removed, updated };\n}\n", "import path from 'path';\nimport { readdir } from 'fs/promises';\n\nimport { remove } from './fs.js';\nimport { logger } from './logger.js';\n\n/**\n * Clean up empty parent directories after file deletion.\n * \n * Walks up the directory tree from each deleted file, removing empty directories\n * until hitting a preserved directory or the root.\n * \n * For platform files (e.g., .cursor/commands/essentials/file.md):\n * - Removes empty subdirectories (essentials/, commands/)\n * - Stops at and preserves the platform root (.cursor/)\n * \n * For root files (e.g., docs/guides/file.md):\n * - Removes all empty parent directories\n * - Stops only at workspace root\n * \n * @param rootDir - Root directory boundary (workspace root or package root)\n * @param deletedPaths - Absolute paths of deleted files\n * @param preservedDirs - Set of absolute directory paths to preserve (never remove)\n */\nexport async function cleanupEmptyParents(\n rootDir: string,\n deletedPaths: string[],\n preservedDirs: Set<string> = new Set()\n): Promise<void> {\n const candidateDirs = new Set<string>();\n\n // Collect all parent directories from deleted files\n for (const deletedPath of deletedPaths) {\n let current = path.dirname(deletedPath);\n \n // Walk up the directory tree\n while (current.startsWith(rootDir) && current !== rootDir) {\n // Stop at preserved directories (platform roots)\n if (preservedDirs.has(current)) {\n break;\n }\n \n candidateDirs.add(current);\n current = path.dirname(current);\n }\n }\n\n // Sort by depth (deepest first) to ensure we process child 
directories before parents\n const sorted = Array.from(candidateDirs).sort((a, b) => b.length - a.length);\n \n // Remove empty directories\n for (const dir of sorted) {\n try {\n const entries = await readdir(dir);\n \n // Only remove if directory is empty and not preserved\n if (entries.length === 0 && !preservedDirs.has(dir)) {\n await remove(dir);\n logger.debug(`Removed empty directory: ${path.relative(rootDir, dir)}`);\n }\n } catch (error) {\n // Ignore errors (directory may not exist, permission issues, etc.)\n logger.debug(`Could not process directory ${dir}: ${error}`);\n }\n }\n}\n", "/**\n * Directory Preservation Utilities\n * \n * Determines which directories should be preserved during uninstall cleanup.\n * Uses platform detection patterns to identify platform root directories that\n * should never be removed, even if empty.\n */\n\nimport path from 'path';\nimport { getAllPlatforms, getPlatformDefinition } from '../platforms.js';\n\n/**\n * Extract the directory path from a detection pattern.\n * \n * Detection patterns can be:\n * - Directory names: \".cursor\", \".claude\"\n * - File paths: \".claude-plugin/plugin.json\", \"CLAUDE.md\"\n * - Future: glob patterns\n * \n * @param pattern - Detection pattern from platform definition\n * @param targetDir - Target directory (workspace root or global home)\n * @returns Absolute directory path to preserve, or null if pattern points to workspace root\n * \n * @example\n * extractDirectoryFromPattern(\".cursor\", \"/workspace\") \n * \u2192 \"/workspace/.cursor\"\n * \n * extractDirectoryFromPattern(\".claude-plugin/plugin.json\", \"/workspace\")\n * \u2192 \"/workspace/.claude-plugin\"\n * \n * extractDirectoryFromPattern(\"CLAUDE.md\", \"/workspace\")\n * \u2192 null (root file, don't preserve workspace root)\n */\nexport function extractDirectoryFromPattern(pattern: string, targetDir: string): string | null {\n // Normalize path separators\n const normalized = pattern.replace(/\\\\/g, '/');\n \n // 
Check if it's a directory or a file\n // A dotfile directory (e.g., \".cursor\", \".claude\") is a single segment starting with \".\"\n // and no path separators \u2014 distinct from a file extension like \"file.md\"\n const isDotfileDir = /^\\.[^./]+$/.test(normalized);\n const hasExtension = !isDotfileDir && /\\.[a-z0-9]+$/i.test(normalized);\n const isDirectory = normalized.endsWith('/') || !hasExtension;\n \n let dirPath: string;\n \n if (isDirectory) {\n // Remove trailing slash if present\n dirPath = normalized.replace(/\\/$/, '');\n } else {\n // Extract directory from file path\n const lastSlash = normalized.lastIndexOf('/');\n dirPath = lastSlash >= 0 ? normalized.substring(0, lastSlash) : '';\n }\n \n // Convert to absolute path\n const absPath = dirPath ? path.join(targetDir, dirPath) : targetDir;\n \n // Don't preserve the workspace root itself\n // Root files (like CLAUDE.md, AGENTS.md) have their own preservation logic\n return absPath === targetDir ? null : absPath;\n}\n\n/**\n * Build a Set of absolute directory paths that should be preserved during cleanup.\n * \n * These directories are identified from platform detection patterns and represent\n * platform root directories (e.g., .cursor, .claude, .opencode) that should never\n * be removed, even if they become empty after uninstalling packages.\n * \n * @param targetDir - Target directory (workspace root or global home)\n * @returns Set of absolute directory paths to preserve\n * \n * @example\n * buildPreservedDirectoriesSet(\"/workspace\")\n * \u2192 Set([\n * \"/workspace/.cursor\",\n * \"/workspace/.claude\", \n * \"/workspace/.opencode\",\n * \"/workspace/.claude-plugin\",\n * ...\n * ])\n */\nexport function buildPreservedDirectoriesSet(targetDir: string): Set<string> {\n const preserved = new Set<string>();\n const platforms = getAllPlatforms(undefined, targetDir);\n \n for (const platform of platforms) {\n const definition = getPlatformDefinition(platform, targetDir);\n \n // Primary: use 
detection patterns\n if (definition.detection && definition.detection.length > 0) {\n for (const pattern of definition.detection) {\n const dirToPreserve = extractDirectoryFromPattern(pattern, targetDir);\n if (dirToPreserve) {\n preserved.add(dirToPreserve);\n }\n }\n } else if (definition.rootDir) {\n // Fallback: use rootDir for platforms without detection patterns\n const rootPath = path.join(targetDir, definition.rootDir);\n if (rootPath !== targetDir) {\n preserved.add(rootPath);\n }\n }\n }\n \n return preserved;\n}\n", "/**\n * Result Aggregation Helpers\n * \n * Shared utilities for aggregating installation results across multiple packages or platforms.\n * Handles merging of file mappings, conflicts, errors, and statistics.\n */\n\nimport type { FlowInstallResult } from '../strategies/types.js';\nimport type { WorkspaceIndexFileMapping } from '../../../types/workspace-index.js';\n\n/**\n * Merge file mappings from multiple results\n * Simple deduplication by concatenating unique targets\n * \n * @param target - Target mapping to merge into (mutated)\n * @param source - Source mapping to merge from\n */\nexport function mergeFileMappings(\n target: Record<string, (string | WorkspaceIndexFileMapping)[]>,\n source: Record<string, (string | WorkspaceIndexFileMapping)[]>\n): void {\n for (const [sourceKey, targets] of Object.entries(source)) {\n const existing = target[sourceKey] ?? 
[];\n target[sourceKey] = Array.from(new Set([...existing, ...targets])).sort();\n }\n}\n\n/**\n * Merge file mappings with workspace index format\n * Prefers complex mappings over simple strings, deduplicates by target path\n * \n * This is used by flow-index-installer which needs to handle both string and\n * WorkspaceIndexFileMapping formats.\n * \n * @param target - Target mapping to merge into (mutated)\n * @param source - Source mapping to merge from\n */\nexport function mergeWorkspaceFileMappings(\n target: Record<string, (string | WorkspaceIndexFileMapping)[]>,\n source: Record<string, (string | WorkspaceIndexFileMapping)[]>\n): void {\n for (const [sourceKey, targets] of Object.entries(source)) {\n const existing = target[sourceKey] ?? [];\n \n // Deduplicate by target path, prefer complex mapping over string\n const byTarget = new Map<string, string | WorkspaceIndexFileMapping>();\n \n for (const m of existing) {\n const targetPath = typeof m === 'string' ? m : m.target;\n byTarget.set(targetPath, m);\n }\n \n for (const m of targets) {\n const targetPath = typeof m === 'string' ? m : m.target;\n const prior = byTarget.get(targetPath);\n \n if (!prior) {\n byTarget.set(targetPath, m);\n } else if (typeof prior === 'string' && typeof m !== 'string') {\n // Prefer complex mapping over string\n byTarget.set(targetPath, m);\n }\n }\n \n target[sourceKey] = Array.from(byTarget.values());\n }\n}\n\n/**\n * Aggregate flow install results\n * \n * Merges statistics, errors, conflicts, target paths, and file mappings from\n * source into target. 
Target is mutated.\n * \n * @param target - Target result to aggregate into (mutated)\n * @param source - Source result to aggregate from\n */\nexport function aggregateFlowResults(\n target: FlowInstallResult,\n source: FlowInstallResult\n): void {\n target.filesProcessed += source.filesProcessed;\n target.filesWritten += source.filesWritten;\n target.errors.push(...source.errors);\n target.conflicts.push(...source.conflicts);\n target.targetPaths.push(...(source.targetPaths ?? []));\n \n mergeFileMappings(target.fileMapping, source.fileMapping ?? {});\n \n if (!source.success) {\n target.success = false;\n }\n}\n\n/**\n * Collect unique conflict messages\n * \n * Converts FlowConflictReport objects to strings and adds unique messages to target array.\n * Used by flow-index-installer to aggregate conflicts across platforms.\n * \n * @param target - Target array to add messages to (mutated)\n * @param conflicts - Source conflicts to convert and add\n */\nexport function collectConflictMessages(\n target: string[],\n conflicts: FlowInstallResult['conflicts']\n): void {\n for (const conflict of conflicts) {\n const msg = `${conflict.targetPath}: ${conflict.message}`;\n if (!target.includes(msg)) {\n target.push(msg);\n }\n }\n}\n\n/**\n * Collect unique error messages\n * \n * Converts FlowInstallError objects to strings and adds unique messages to target array.\n * Used by flow-index-installer to aggregate errors across platforms.\n * \n * @param target - Target array to add messages to (mutated)\n * @param errors - Source errors to convert and add\n */\nexport function collectErrorMessages(\n target: string[],\n errors: FlowInstallResult['errors']\n): void {\n for (const error of errors) {\n const msg = `${error.sourcePath}: ${error.message}`;\n if (!target.includes(msg)) {\n target.push(msg);\n }\n }\n}\n", "/**\n * Installation Executor\n *\n * Contains the index-based installation logic for executing package installations.\n * Migrated from install-flow.ts to 
support the unified pipeline.\n */\n\nimport { InstallOptions } from '../../../types/index.js';\nimport type { ResolvedPackage } from '../../dependency-resolver/types.js';\nimport { type Platform } from '../../platforms.js';\nimport { logger } from '../../../utils/logger.js';\nimport { UserCancellationError } from '../../../utils/errors.js';\nimport { discoverAndCategorizeFiles } from '../helpers/file-discovery.js';\nimport { installOrSyncRootFiles } from './root-files.js';\nimport { installPackageByIndexWithFlows as installPackageByIndex, type IndexInstallResult } from '../flow-index-installer.js';\nimport type { RelocatedFile } from '../conflicts/file-conflict-resolver.js';\nimport type { IndexWriteCollector } from '../wave-resolver/index-write-collector.js';\nimport { checkAndHandleAllPackageConflicts } from './conflict-handler.js';\nimport { readWorkspaceIndex, writeWorkspaceIndex } from '../../../utils/workspace-index-yml.js';\nimport { deduplicateTargets } from '../../../utils/workspace-index-helpers.js';\nimport type { PromptPort } from '../../ports/prompt.js';\nimport type { InstallScope, WorkspaceIndexFileMapping } from '../../../types/workspace-index.js';\n\nexport type ConflictSummary = Awaited<ReturnType<typeof checkAndHandleAllPackageConflicts>>;\n\nexport interface InstallationPhasesParams {\n cwd: string;\n packages: ResolvedPackage[];\n platforms: Platform[];\n conflictResult?: ConflictSummary;\n options: InstallOptions;\n targetDir: string;\n matchedPattern?: string;\n prompt?: PromptPort;\n indexWriteCollector?: IndexWriteCollector;\n sharedOwnershipContext?: import('../conflicts/file-conflict-resolver.js').OwnershipContext;\n installScope?: InstallScope;\n}\n\nexport interface InstallationPhasesResult {\n installedCount: number;\n skippedCount: number;\n errorCount: number;\n allAddedFiles: string[];\n allUpdatedFiles: string[];\n rootFileResults: { installed: string[]; updated: string[]; skipped: string[] };\n totalOpenPackageFiles: number;\n 
errors?: string[];\n /** True when namespace conflict resolution was triggered for any package */\n namespaced?: boolean;\n /** Paths of files that were installed/updated under namespace conflict resolution */\n namespacedFiles?: string[];\n /** Files that were physically relocated on disk during namespace resolution */\n relocatedFiles?: RelocatedFile[];\n /** Absolute paths of files that were auto-claimed (content identical, unowned on disk) */\n claimedFiles?: string[];\n}\n\n/**\n * Perform the index-based installation process\n *\n * Installs each package using the index-based installer and handles root files.\n */\nexport async function performIndexBasedInstallationPhases(params: InstallationPhasesParams): Promise<InstallationPhasesResult> {\n const { cwd, packages, platforms, conflictResult, options, targetDir, matchedPattern, prompt, indexWriteCollector, sharedOwnershipContext } = params;\n\n let totalInstalled = 0;\n let totalUpdated = 0;\n let totalDeleted = 0;\n let totalSkipped = 0;\n let totalErrors = 0;\n const allAddedFiles: string[] = [];\n const allUpdatedFiles: string[] = [];\n const allDeletedFiles: string[] = [];\n const errors: string[] = [];\n const allNamespacedFiles: string[] = [];\n const allClaimedFiles: string[] = [];\n const allRelocatedFiles: RelocatedFile[] = [];\n\n for (const resolved of packages) {\n try {\n // Extract originalContentRoot if it was stored during conversion\n const originalContentRoot = (resolved as any).originalContentRoot;\n\n // Check if the package-level conflict phase confirmed an overwrite for this package\n const forceOverwrite = conflictResult?.forceOverwritePackages?.has(resolved.name) ?? 
false;\n \n const installResult: IndexInstallResult = await installPackageByIndex(\n cwd,\n resolved.name,\n resolved.version,\n platforms,\n options,\n resolved.contentRoot,\n resolved.pkg._format,\n resolved.marketplaceMetadata,\n matchedPattern,\n resolved.resourceVersion,\n originalContentRoot, // Pass original path for index writing\n forceOverwrite, // Phase 5: propagate package-level overwrite decision\n prompt,\n indexWriteCollector,\n sharedOwnershipContext,\n undefined, // sourceType\n params.installScope\n );\n\n totalInstalled += installResult.installed;\n totalUpdated += installResult.updated;\n totalDeleted += installResult.deleted;\n totalSkipped += installResult.skipped;\n\n allAddedFiles.push(...installResult.installedFiles);\n allUpdatedFiles.push(...installResult.updatedFiles);\n allDeletedFiles.push(...installResult.deletedFiles);\n\n // Aggregate namespace metadata\n if (installResult.namespacedFiles) {\n allNamespacedFiles.push(...installResult.namespacedFiles);\n }\n if (installResult.claimedFiles) {\n allClaimedFiles.push(...installResult.claimedFiles);\n }\n if (installResult.relocatedFiles) {\n allRelocatedFiles.push(...installResult.relocatedFiles);\n }\n\n if (installResult.installed > 0 || installResult.updated > 0 || installResult.deleted > 0) {\n logger.info(`Index-based install for ${resolved.name}: ${installResult.installed} installed, ${installResult.updated} updated, ${installResult.deleted} deleted`);\n }\n } catch (error) {\n if (error instanceof UserCancellationError) {\n throw error;\n }\n const errorMsg = `Failed index-based install for ${resolved.name}: ${error}`;\n logger.error(errorMsg);\n errors.push(errorMsg);\n totalErrors++;\n }\n }\n\n // Handle root files separately\n const rootFileResults = {\n installed: new Set<string>(),\n updated: new Set<string>(),\n skipped: new Set<string>()\n };\n\n /** Per-package root files to augment workspace index */\n const rootFileAugmentations = new Map<string, { rootFilePaths: 
string[] }>();\n\n for (const resolved of packages) {\n try {\n const categorized = await discoverAndCategorizeFiles(\n resolved.name,\n resolved.version,\n platforms,\n resolved.contentRoot,\n matchedPattern\n );\n const installResult = await installOrSyncRootFiles(\n cwd,\n resolved.name,\n categorized.rootFiles,\n platforms\n );\n\n installResult.created.forEach(file => rootFileResults.installed.add(file));\n installResult.updated.forEach(file => rootFileResults.updated.add(file));\n installResult.skipped.forEach(file => rootFileResults.skipped.add(file));\n\n // Collect root files for index augmentation\n if (!options.dryRun) {\n const rootFilePaths = [...installResult.created, ...installResult.updated];\n if (rootFilePaths.length > 0) {\n rootFileAugmentations.set(resolved.name, { rootFilePaths });\n }\n }\n } catch (error) {\n if (error instanceof UserCancellationError) {\n throw error;\n }\n const errorMsg = `Failed root file install for ${resolved.name}: ${error}`;\n logger.error(errorMsg);\n errors.push(errorMsg);\n totalErrors++;\n }\n }\n\n // Augment workspace index with root files and root copy files (flow-installer writes index before root phase)\n if (!options.dryRun && rootFileAugmentations.size > 0) {\n if (indexWriteCollector) {\n // Defer to collector (parallel install mode)\n for (const [packageName, { rootFilePaths }] of rootFileAugmentations) {\n const files: Record<string, (string | WorkspaceIndexFileMapping)[]> = {};\n for (const p of rootFilePaths) {\n files[p] = [{ target: p, merge: 'composite', keys: [packageName] }];\n }\n indexWriteCollector.recordFileAugmentation({ packageName, files });\n }\n } else {\n try {\n const wsRecord = await readWorkspaceIndex(cwd);\n wsRecord.index.packages = wsRecord.index.packages ?? {};\n for (const [packageName, { rootFilePaths }] of rootFileAugmentations) {\n const entry = wsRecord.index.packages[packageName];\n if (!entry) continue;\n const files = { ...(entry.files ?? 
{}) };\n for (const p of rootFilePaths) {\n const incoming: WorkspaceIndexFileMapping = { target: p, merge: 'composite', keys: [packageName] };\n const existing = files[p] ?? [];\n files[p] = deduplicateTargets(existing, [incoming]);\n }\n wsRecord.index.packages[packageName] = { ...entry, files };\n }\n await writeWorkspaceIndex(wsRecord);\n logger.debug(`Augmented workspace index with root files for ${rootFileAugmentations.size} package(s)`);\n } catch (error) {\n logger.warn(`Failed to augment workspace index with root files: ${error}`);\n }\n }\n }\n\n // Deduplicate: remove any root files that also appear in allAddedFiles/allUpdatedFiles\n const addedSet = new Set(allAddedFiles);\n const updatedSet = new Set(allUpdatedFiles);\n const dedupedRootInstalled = Array.from(rootFileResults.installed).filter(\n f => !addedSet.has(f)\n );\n const dedupedRootUpdated = Array.from(rootFileResults.updated).filter(\n f => !updatedSet.has(f)\n );\n\n return {\n installedCount: totalInstalled,\n skippedCount: totalSkipped,\n errorCount: totalErrors,\n allAddedFiles,\n errors: errors.length > 0 ? errors : undefined,\n allUpdatedFiles,\n rootFileResults: {\n installed: dedupedRootInstalled,\n updated: dedupedRootUpdated,\n skipped: Array.from(rootFileResults.skipped)\n },\n totalOpenPackageFiles: totalInstalled + totalUpdated,\n namespaced: allNamespacedFiles.length > 0 || undefined,\n namespacedFiles: allNamespacedFiles.length > 0 ? allNamespacedFiles : undefined,\n relocatedFiles: allRelocatedFiles.length > 0 ? allRelocatedFiles : undefined,\n claimedFiles: allClaimedFiles.length > 0 ? 
allClaimedFiles : undefined\n };\n}\n", "import { join } from 'path';\nimport { resolvePlatformName, type Platform } from '../platforms.js';\nimport { normalizePlatforms } from '../platform/platform-mapper.js';\nimport { detectPlatforms, promptForPlatformSelection } from './package-installation.js';\nimport { getLocalOpenPackageDir } from '../../utils/paths.js';\nimport { parsePackageYml } from '../../utils/package-yml.js';\nimport { FILE_PATTERNS } from '../../constants/index.js';\nimport { logger } from '../../utils/logger.js';\nimport type { OutputPort } from '../ports/output.js';\nimport type { PromptPort } from '../ports/prompt.js';\n\n/**\n * Resolve platforms for an operation.\n * Resolution priority: CLI flag > manifest field > auto-detection > prompt/default\n *\n * The `interactive` flag is typically derived from\n * `InteractionPolicy.canPrompt(PromptTier.Required)` at the call site.\n */\nexport async function resolvePlatforms(\n cwd: string,\n specified: string[] | undefined,\n options: { interactive?: boolean; output?: OutputPort; prompt?: PromptPort } = {}\n): Promise<Platform[]> {\n const canPrompt = options.interactive === true;\n\n // 1. CLI --platforms flag\n const normalized = normalizePlatforms(specified);\n if (normalized && normalized.length > 0) {\n const resolved = normalized.map(name => resolvePlatformName(name));\n const invalidIndex = resolved.findIndex(platform => !platform);\n if (invalidIndex !== -1) {\n throw new Error(`platform ${normalized[invalidIndex]} not found`);\n }\n return resolved as Platform[];\n }\n\n // 2. Manifest platforms: field\n const manifestPlatforms = await readManifestPlatforms(cwd);\n if (manifestPlatforms) return manifestPlatforms;\n\n // 3. Auto-detect\n const auto = await detectPlatforms(cwd);\n if (auto.length > 0) return auto;\n\n // 4. Interactive prompt\n if (canPrompt) {\n const selected = await promptForPlatformSelection(options.output, options.prompt);\n return selected;\n }\n\n // 5. 
Default to cursor\n return ['cursor'] as Platform[];\n}\n\n/**\n * Read platforms from the workspace manifest, returning null if unavailable.\n * Separates I/O errors (fall through) from validation errors (thrown).\n */\nasync function readManifestPlatforms(cwd: string): Promise<Platform[] | null> {\n let manifest;\n try {\n const opkgDir = getLocalOpenPackageDir(cwd);\n const manifestPath = join(opkgDir, FILE_PATTERNS.OPENPACKAGE_YML);\n manifest = await parsePackageYml(manifestPath);\n } catch {\n // Manifest missing or unreadable \u2014 fall through to auto-detect\n logger.debug('Could not read manifest platforms, falling through to auto-detect');\n return null;\n }\n\n if (!manifest.platforms || manifest.platforms.length === 0) return null;\n\n // Validate outside the try/catch so validation errors propagate\n const resolved = manifest.platforms.map(name => resolvePlatformName(name));\n const invalidIndex = resolved.findIndex(p => !p);\n if (invalidIndex !== -1) {\n throw new Error(`platform ${manifest.platforms[invalidIndex]} in manifest not found`);\n }\n logger.debug('Using manifest platforms:', manifest.platforms);\n return resolved as Platform[];\n}\n", "import { logger } from '../../utils/logger.js';\nimport type { OutputPort } from '../ports/output.js';\nimport type { PromptPort } from '../ports/prompt.js';\nimport { resolveOutput, resolvePrompt } from '../ports/resolve.js';\nimport type { Platform } from '../../types/platform.js';\nimport { getDetectedPlatforms, getPlatformDefinitions } from '../platforms.js';\nimport type { PlatformDefinition } from '../../types/platform.js';\n\n/**\n * Detect existing platforms in the project\n * Wrapper around getDetectedPlatforms that adds debug logging\n */\nexport async function detectPlatforms(targetDir: string): Promise<Platform[]> {\n const detectedPlatforms = await getDetectedPlatforms(targetDir);\n\n if (detectedPlatforms.length > 0) {\n logger.debug(`Auto-detected platforms: ${detectedPlatforms.join(', 
')}`);\n }\n\n return detectedPlatforms;\n}\n\n/**\n * Prompt user for platform selection when no platforms are detected\n */\nexport async function promptForPlatformSelection(\n output?: OutputPort,\n prompt?: PromptPort\n): Promise<Platform[]> {\n const out = output ?? resolveOutput();\n const prm = prompt ?? resolvePrompt();\n \n out.step('Platform Detection');\n out.info('No AI development platform detected in this project.');\n\n const choices = Object.values(getPlatformDefinitions()).map((platform: PlatformDefinition) => ({\n title: platform.name,\n value: platform.id\n }));\n\n const selected = await prm.select<string>(\n 'Which platform are you using for AI-assisted development?',\n choices,\n 'Use arrow keys to navigate, Enter to select'\n );\n\n return selected ? [selected as Platform] : [];\n}\n", "import type { InstallationContext } from '../context.js';\nimport { performIndexBasedInstallationPhases } from '../../operations/installation-executor.js';\nimport { resolvePlatforms } from '../../platform-resolution.js';\nimport { logger } from '../../../../utils/logger.js';\nimport { splitPackageNameForTelemetry } from '../../../../utils/plugin-naming.js';\nimport { resolveOutput, resolvePrompt } from '../../../ports/resolve.js';\nimport { PromptTier } from '../../../interaction-policy.js';\n\nimport type { RelocatedFile } from '../../conflicts/file-conflict-resolver.js';\n\nexport interface ExecutionResult {\n installedCount: number;\n skippedCount: number;\n errorCount: number;\n allAddedFiles: string[];\n allUpdatedFiles: string[];\n rootFileResults: { installed: string[]; updated: string[]; skipped: string[] };\n hadErrors: boolean;\n installedAnyFiles: boolean;\n errors?: string[];\n /** True when namespace conflict resolution was triggered for any package */\n namespaced?: boolean;\n /** Paths of files that were installed/updated under namespace conflict resolution */\n namespacedFiles?: string[];\n /** Files that were physically relocated on disk 
during namespace resolution */\n relocatedFiles?: RelocatedFile[];\n /** Absolute paths of files that were auto-claimed (content identical, unowned on disk) */\n claimedFiles?: string[];\n}\n\n/**\n * Execute installation phase\n */\nexport async function executeInstallationPhase(\n ctx: InstallationContext\n): Promise<ExecutionResult> {\n // Resolve platforms if not already set (orchestrator preflight sets for bulk/single)\n if (ctx.platforms.length === 0) {\n const canPrompt = ctx.execution.interactionPolicy?.canPrompt(PromptTier.Required) ?? false;\n ctx.platforms = await resolvePlatforms(\n ctx.targetDir,\n ctx.options.platforms,\n { interactive: canPrompt, output: resolveOutput(ctx.execution), prompt: resolvePrompt(ctx.execution) }\n );\n }\n\n // Get conflict result from context\n const conflictResult = ctx.conflictResult;\n\n // Execute installation\n const outcome = await performIndexBasedInstallationPhases({\n cwd: ctx.targetDir,\n packages: ctx.resolvedPackages,\n platforms: ctx.platforms,\n conflictResult,\n options: ctx.options,\n targetDir: ctx.targetDir,\n matchedPattern: ctx.matchedPattern, // Phase 4: Pass matched pattern\n prompt: ctx.execution?.prompt,\n indexWriteCollector: ctx.execution?.indexWriteCollector,\n sharedOwnershipContext: ctx.execution?.sharedOwnershipContext,\n installScope: ctx.installScope\n });\n \n // Track errors in context\n outcome.errors?.forEach(e => ctx.errors.push(e));\n \n const hadErrors = outcome.errorCount > 0;\n const installedAnyFiles =\n outcome.allAddedFiles.length > 0 ||\n outcome.allUpdatedFiles.length > 0 ||\n outcome.rootFileResults.installed.length > 0 ||\n outcome.rootFileResults.updated.length > 0;\n \n // Record telemetry for successful installations\n if (installedAnyFiles && ctx.execution.telemetryCollector) {\n for (const pkg of ctx.resolvedPackages) {\n // Split package name into base name and resource path\n // This handles cases like \"gh@user/repo/agents/designer\" -> base: \"gh@user/repo\", path: 
\"agents/designer\"\n const { baseName, resourcePath: nameResourcePath } = splitPackageNameForTelemetry(pkg.name);\n \n // Determine the actual resource path to send\n // Priority: ctx.matchedPattern > nameResourcePath (extracted from package name)\n const resourcePath = ctx.matchedPattern || nameResourcePath;\n \n // Determine resource type\n let resourceType: string | undefined;\n if (pkg.marketplaceMetadata) {\n resourceType = 'plugin';\n } else if (resourcePath) {\n // Check if this was an agent or skill based on resource path\n if (resourcePath.includes('agent')) {\n resourceType = 'agent';\n } else if (resourcePath.includes('skill')) {\n resourceType = 'skill';\n }\n }\n \n // Extract resource name from resource path or package name\n const resourceName = resourcePath \n ? resourcePath.split('/').pop()?.replace(/\\.(md|json)$/, '') || pkg.name.split('/').pop() || pkg.name\n : pkg.name.split('/').pop() || pkg.name;\n \n ctx.execution.telemetryCollector.recordInstall({\n packageName: baseName, // Send base package name (e.g., \"gh@user/repo\")\n version: pkg.version,\n resourcePath, // Send resource path separately (e.g., \"agents/designer\")\n resourceType,\n resourceName,\n marketplaceName: pkg.marketplaceMetadata?.pluginName,\n pluginName: pkg.marketplaceMetadata?.pluginName\n });\n }\n }\n \n return {\n ...outcome,\n hadErrors,\n installedAnyFiles,\n errors: outcome.errors\n };\n}\n\n", "import type { InstallationContext } from '../context.js';\nimport { addPackageToYml } from '../../../package-management.js';\nimport { formatPathForYaml } from '../../../../utils/path-resolution.js';\nimport { logger } from '../../../../utils/logger.js';\n\n/**\n * Update manifest phase (openpackage.yml)\n */\nexport async function updateManifestPhase(ctx: InstallationContext): Promise<void> {\n const mainPackage = ctx.resolvedPackages.find(pkg => pkg.isRoot);\n\n if (!mainPackage) {\n logger.warn(`No root package found in resolved packages, skipping manifest update`);\n 
return;\n }\n\n try {\n // Determine fields based on source type\n const fields = buildManifestFields(ctx, mainPackage);\n\n await addPackageToYml(\n ctx.targetDir,\n ctx.source.packageName,\n mainPackage.version,\n ctx.options.dev ?? false,\n fields.range,\n fields.force,\n fields.base,\n fields.gitUrl,\n fields.gitRef,\n fields.resourcePath,\n );\n\n logger.info(`Updated manifest for ${ctx.source.packageName}`);\n\n } catch (error) {\n logger.warn(`Failed to update manifest: ${error}`);\n // Non-fatal - installation succeeded even if manifest update failed\n }\n}\n\nexport function buildManifestFields(ctx: InstallationContext, mainPackage: any) {\n const fields: any = {\n range: undefined,\n force: true,\n base: undefined, // path from source root to package root\n resourcePath: undefined, // resource selection within package\n gitUrl: undefined,\n gitRef: undefined,\n };\n\n // Check for git source override first (for marketplace plugins)\n // This allows path-based loading with git-based manifest recording\n if (ctx.source.gitSourceOverride) {\n fields.gitUrl = ctx.source.gitSourceOverride.gitUrl;\n fields.gitRef = ctx.source.gitSourceOverride.gitRef;\n // Split: gitPath \u2192 base (subdirectory), resourcePath \u2192 path (resource selection)\n fields.base = ctx.source.gitSourceOverride.gitPath;\n fields.resourcePath = ctx.source.resourcePath;\n return fields;\n }\n\n // Mutable source override: auto-discovered workspace/global packages \u2192 name-only\n // The resolved path goes to the lockfile, not the manifest.\n if (ctx.source.mutableSourceOverride) {\n // Name-only entry \u2014 return empty fields\n return fields;\n }\n\n // Record base field for reproducible installs\n if (ctx.baseRelative) {\n fields.base = ctx.baseRelative;\n }\n\n switch (ctx.source.type) {\n case 'registry':\n // Registry packages get version range\n fields.range = ctx.source.version;\n break;\n\n case 'path':\n // Explicit path packages: source location \u2192 base field\n 
fields.base = formatPathForYaml(ctx.source.localPath || '', ctx.targetDir);\n break;\n\n case 'git':\n // Git packages: url + base (subdirectory) + resourcePath (resource selection)\n fields.gitUrl = ctx.source.gitUrl;\n fields.gitRef = ctx.source.gitRef;\n fields.base = ctx.source.gitPath || undefined; // subdirectory (omit if repo root)\n fields.resourcePath = ctx.source.resourcePath; // resource selection (omit if full)\n break;\n\n case 'workspace':\n // Workspace (apply) doesn't update manifest\n break;\n }\n\n return fields;\n}\n", "import type { PackageRemoteResolutionOutcome } from './types.js';\nimport type { RelocatedFile } from './conflicts/file-conflict-resolver.js';\nimport type { OutputPort } from '../ports/output.js';\nimport { resolveOutput } from '../ports/resolve.js';\nimport { extractRemoteErrorReason } from '../../utils/error-reasons.js';\nimport { formatPathForDisplay, getTreeConnector } from '../../utils/formatters.js';\n\n/**\n * Data required to render the install report.\n *\n * Replaces the previous 13-positional-parameter signature with a single\n * options object for clarity and extensibility.\n */\nexport interface InstallReportData {\n packageName: string;\n resolvedPackages: any[];\n platformResult: { platforms: string[]; created: string[] };\n options: any;\n mainPackage?: any;\n installedFiles?: string[];\n updatedFiles?: string[];\n rootFileResults?: { installed: string[]; updated: string[]; skipped: string[] };\n missingPackages?: string[];\n missingPackageOutcomes?: Record<string, PackageRemoteResolutionOutcome>;\n errorCount?: number;\n errors?: string[];\n /** When true, show \"dependency recorded in your manifest\" for 0-install success. Defaults to true. 
*/\n isDependencyInstall?: boolean;\n /** True when namespace conflict resolution was triggered */\n namespaced?: boolean;\n /** Paths of files that were installed/updated under namespace conflict resolution */\n namespacedFiles?: string[];\n /** Files that were physically relocated on disk during namespace resolution */\n relocatedFiles?: RelocatedFile[];\n /** Absolute paths of files that were auto-claimed (content identical, unowned on disk) */\n claimedFiles?: string[];\n /** When true, use compact note-based display for file lists (interactive mode) */\n interactive?: boolean;\n /** Package names that were replaced during subsumption resolution (upgrade from resource-scoped installs) */\n replacedResources?: string[];\n /** When true, skip the main success/error header (used by grouped reports where per-resource spinners already showed status) */\n suppressHeader?: boolean;\n}\n\n// ============================================================================\n// Helper: render a list of items with correct tree connectors\n// ============================================================================\n\nfunction renderTreeList(items: string[], output: OutputPort, indent: string = ' '): void {\n for (let i = 0; i < items.length; i++) {\n const connector = getTreeConnector(i === items.length - 1);\n output.info(`${indent}${connector}${items[i]}`);\n }\n}\n\n/**\n * Render a file list as a note box (interactive) or tree list (non-interactive).\n * Matches the pattern used in add.ts and remove.ts.\n */\nfunction renderFileList(\n items: string[],\n title: string,\n output: OutputPort,\n interactive: boolean\n): void {\n if (interactive) {\n const maxDisplay = 10;\n const displayItems = items.slice(0, maxDisplay);\n const more = items.length > maxDisplay ? `\\n... 
and ${items.length - maxDisplay} more` : '';\n output.note(displayItems.join('\\n') + more, title);\n } else {\n output.success(title);\n renderTreeList(items, output);\n }\n}\n\n// ============================================================================\n// Main display function\n// ============================================================================\n\nexport function displayInstallationResults(data: InstallReportData, output: OutputPort = resolveOutput()): void {\n const {\n packageName,\n resolvedPackages,\n platformResult,\n mainPackage,\n installedFiles,\n updatedFiles,\n rootFileResults,\n missingPackages,\n missingPackageOutcomes,\n errorCount,\n errors,\n isDependencyInstall = true,\n namespacedFiles,\n claimedFiles,\n relocatedFiles,\n interactive = false,\n replacedResources,\n } = data;\n\n const namespacedSet = new Set(namespacedFiles ?? []);\n const claimedSet = new Set(claimedFiles ?? []);\n const dim = (text: string) => `\\x1b[2m${text}\\x1b[0m`;\n\n // Check if installation actually succeeded\n const hadErrors = (errorCount && errorCount > 0) || false;\n const installedAnyFiles = (installedFiles && installedFiles.length > 0) ||\n (updatedFiles && updatedFiles.length > 0) ||\n (rootFileResults && (rootFileResults.installed.length > 0 || rootFileResults.updated.length > 0));\n\n if (hadErrors && !installedAnyFiles) {\n // Complete failure - nothing was installed\n output.error(`Failed to install ${packageName}${mainPackage ? 
`@${mainPackage.version}` : ''}`);\n if (errors && errors.length > 0) {\n output.error(`Installation errors:`);\n for (const error of errors) {\n output.info(` \u2022 ${error}`);\n }\n }\n return;\n }\n\n // Handle empty directory/filtered installs (0 files but still success)\n if (!installedAnyFiles && !hadErrors) {\n let summaryText = `Succeeded ${packageName}`;\n if (mainPackage) {\n summaryText += `@${mainPackage.version}`;\n }\n summaryText += ' with 0 installs';\n output.success(`${summaryText}`);\n output.info(` No files matched. The package directory may be empty or filters excluded all content.`);\n if (isDependencyInstall) {\n output.info(` The dependency has been recorded in your manifest.`);\n }\n return;\n }\n\n // \u2500\u2500 Main success header \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\n if (!data.suppressHeader) {\n let headerText = `Installed ${packageName}`;\n if (mainPackage?.version) {\n headerText += `@${mainPackage.version}`;\n }\n output.success(headerText);\n }\n\n // \u2500\u2500 Dependency packages \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\n const dependencyPackages = resolvedPackages.filter(f => !f.isRoot);\n if (dependencyPackages.length > 0) {\n output.success(`Installed dependencies: ${dependencyPackages.length}`);\n const depLines = dependencyPackages.map(dep => {\n const packageSpecifier =\n typeof dep.name === 'string' && (dep.name.startsWith('@') || dep.name.startsWith('gh@'))\n ? 
dep.name\n : `@${dep.name}`;\n return `${packageSpecifier}@${dep.version}`;\n });\n renderTreeList(depLines, output);\n }\n if (resolvedPackages.length > 1) {\n output.success(`Total packages processed: ${resolvedPackages.length}`);\n }\n\n // \u2500\u2500 Installed files \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\n if (installedFiles && installedFiles.length > 0) {\n const header = `Installed files: ${installedFiles.length}`;\n const sortedFiles = [...installedFiles].sort((a, b) => a.localeCompare(b));\n renderFileList(sortedFiles.map(f => {\n const display = formatPathForDisplay(f);\n return namespacedSet.has(f) ? `${display} ${dim('[namespaced]')}`\n : claimedSet.has(f) ? `${display} ${dim('[claimed]')}`\n : display;\n }), header, output, interactive);\n }\n\n // \u2500\u2500 Updated files \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\n if (updatedFiles && updatedFiles.length > 0) {\n const header = `Updated files: ${updatedFiles.length}`;\n const sortedFiles = [...updatedFiles].sort((a, b) => a.localeCompare(b));\n renderFileList(sortedFiles.map(f => {\n const display = formatPathForDisplay(f);\n return namespacedSet.has(f) ? `${display} ${dim('[namespaced]')}`\n : claimedSet.has(f) ? 
`${display} ${dim('[claimed]')}`\n : display;\n }), header, output, interactive);\n }\n\n // \u2500\u2500 Relocated files (namespace-triggered moves) \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\n if (relocatedFiles && relocatedFiles.length > 0) {\n const lines = relocatedFiles.map(\n r => `${formatPathForDisplay(r.from)} \u2192 ${formatPathForDisplay(r.to)}`\n );\n renderFileList(lines, `Relocated files: ${relocatedFiles.length}`, output, interactive);\n }\n\n // \u2500\u2500 Replaced resources (subsumption upgrade) \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\n if (replacedResources && replacedResources.length > 0) {\n const count = replacedResources.length;\n const header = `Replaced ${count} previously installed resource${count === 1 ? '' : 's'}:`;\n if (interactive) {\n output.note(replacedResources.join('\\n'), header);\n } else {\n output.success(header);\n renderTreeList(replacedResources, output);\n }\n }\n\n // \u2500\u2500 Root files \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\n if (rootFileResults) {\n const totalRootFiles = rootFileResults.installed.length + rootFileResults.updated.length;\n if (totalRootFiles > 0) {\n const rootLines: string[] = [];\n if (rootFileResults.installed.length > 0) {\n const sortedInstalled = [...rootFileResults.installed].sort((a, b) => a.localeCompare(b));\n for (const file of sortedInstalled) {\n rootLines.push(`${formatPathForDisplay(file)} (created)`);\n }\n }\n if (rootFileResults.updated.length > 0) {\n const sortedUpdated = 
[...rootFileResults.updated].sort((a, b) => a.localeCompare(b));\n for (const file of sortedUpdated) {\n rootLines.push(`${formatPathForDisplay(file)} (updated)`);\n }\n }\n renderFileList(rootLines, `Root files: ${totalRootFiles} file(s)`, output, interactive);\n }\n }\n\n // \u2500\u2500 Platform directories \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\n if (platformResult.created.length > 0) {\n output.success(`Created platform directories: ${platformResult.created.join(', ')}`);\n }\n\n // \u2500\u2500 Partial failure: errors during an otherwise-successful install \u2500\u2500\u2500\u2500\n if (hadErrors && errors && errors.length > 0) {\n output.warn(`Errors during installation: ${errors.length}`);\n renderTreeList(errors, output);\n }\n\n // \u2500\u2500 Missing dependencies \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\n if (missingPackages && missingPackages.length > 0) {\n output.warn(`Missing dependencies detected:`);\n for (const missing of missingPackages) {\n const reasonLabel = formatMissingDependencyReason(missingPackageOutcomes?.[missing]);\n output.info(` \u2022 ${missing} (${reasonLabel})`);\n }\n output.info(`\uD83D\uDCA1 To resolve missing dependencies:`);\n output.info(` \u2022 Create locally: opkg new <package-name>`);\n output.info(` \u2022 Install from registry/git: opkg install ${missingPackages.join(' ')}`);\n output.info(` \u2022 Remove from openpackage.yml`);\n output.info('');\n }\n}\n\nfunction formatMissingDependencyReason(outcome?: PackageRemoteResolutionOutcome): string {\n if 
(!outcome) {\n return 'not found in registry';\n }\n\n switch (outcome.reason) {\n case 'not-found':\n return 'not found in remote registry';\n case 'access-denied':\n return 'access denied';\n case 'network':\n return 'network error';\n case 'integrity':\n return 'integrity check failed';\n default:\n return extractRemoteErrorReason(outcome.message || 'unknown error');\n }\n}\n", "import type { CommandResult } from '../../../../types/index.js';\nimport type { InstallationContext } from '../context.js';\nimport type { ExecutionResult } from './execute.js';\nimport type { InstallReportData } from '../../install-reporting.js';\nimport { displayInstallationResults } from '../../install-reporting.js';\nimport { getInstallRootDir } from '../../../../utils/paths.js';\nimport { resolveOutput } from '../../../ports/resolve.js';\n\n/**\n * Report results phase\n */\nexport async function reportResultsPhase(\n ctx: InstallationContext,\n installResult: ExecutionResult\n): Promise<CommandResult & { _reportData?: InstallReportData }> {\n const mainPackage = ctx.resolvedPackages.find(pkg => pkg.isRoot);\n\n // Display results (only show \"dependency recorded\" for dependency installs, not workspace-root)\n const isDependencyInstall = ctx.source.type !== 'workspace';\n const reportData: InstallReportData = {\n packageName: ctx.source.packageName,\n resolvedPackages: ctx.resolvedPackages,\n platformResult: { platforms: ctx.platforms, created: [] },\n options: ctx.options,\n mainPackage,\n installedFiles: installResult.allAddedFiles,\n updatedFiles: installResult.allUpdatedFiles,\n rootFileResults: installResult.rootFileResults,\n missingPackages: [],\n missingPackageOutcomes: {},\n errorCount: installResult.errorCount,\n errors: installResult.errors,\n isDependencyInstall,\n namespaced: installResult.namespaced,\n namespacedFiles: installResult.namespacedFiles,\n relocatedFiles: installResult.relocatedFiles,\n claimedFiles: installResult.claimedFiles,\n interactive: 
ctx.execution.outputMode === 'rich',\n replacedResources: ctx._replacedResources,\n };\n\n if (ctx._deferredReport) {\n // Caller will merge and display (e.g. runMultiContextPipeline groupReport)\n return {\n success: true,\n data: {\n packageName: ctx.source.packageName,\n targetDir: getInstallRootDir(ctx.targetDir),\n resolvedPackages: ctx.resolvedPackages,\n totalPackages: ctx.resolvedPackages.length,\n installed: installResult.installedCount,\n skipped: installResult.skippedCount,\n totalOpenPackageFiles: installResult.installedCount + installResult.allUpdatedFiles.length\n },\n warnings: ctx.warnings.length > 0 ? Array.from(new Set(ctx.warnings)) : undefined,\n _reportData: reportData\n };\n }\n\n displayInstallationResults(reportData, resolveOutput(ctx.execution));\n\n return {\n success: true,\n data: {\n packageName: ctx.source.packageName,\n targetDir: getInstallRootDir(ctx.targetDir),\n resolvedPackages: ctx.resolvedPackages,\n totalPackages: ctx.resolvedPackages.length,\n installed: installResult.installedCount,\n skipped: installResult.skippedCount,\n totalOpenPackageFiles: installResult.installedCount + installResult.allUpdatedFiles.length\n },\n warnings: ctx.warnings.length > 0 ? 
Array.from(new Set(ctx.warnings)) : undefined\n };\n}\n", "import path from 'path';\n\nimport type { CommandResult, UninstallOptions, ExecutionContext } from '../../types/index.js';\nimport { ValidationError } from '../../utils/errors.js';\nimport { getLocalOpenPackageDir, getLocalPackageYmlPath } from '../../utils/paths.js';\nimport { readWorkspaceIndex, writeWorkspaceIndex } from '../../utils/workspace-index-yml.js';\nimport { healAndPersistIndex } from '../../utils/workspace-index-healer.js';\nimport { removeWorkspaceIndexEntry, removeWorkspaceIndexFileKeys } from '../../utils/workspace-index-ownership.js';\nimport { processRootFileRemovals } from '../platform/root-file-uninstaller.js';\nimport { exists, remove, walkFiles } from '../../utils/fs.js';\nimport { isDirKey } from '../../utils/package-index-yml.js';\nimport { removePackageFromOpenpackageYml } from '../package-management.js';\nimport { getPlatformRootFileNames } from '../platform/platform-root-files.js';\nimport { getAllPlatforms } from '../platforms.js';\nimport { logger } from '../../utils/logger.js';\nimport { removeFileMapping } from './flow-aware-uninstaller.js';\nimport { getTargetPath, findPackageInIndex } from '../../utils/workspace-index-helpers.js';\nimport { getEmbeddedChildren } from '../../utils/qualified-name.js';\nimport { buildPreservedDirectoriesSet } from '../platform/directory-preservation.js';\nimport { cleanupEmptyParents } from '../../utils/cleanup-empty-parents.js';\nimport { removeLockfileEntry } from '../../utils/lockfile-yml.js';\nimport type { WorkspaceIndexFileMapping } from '../../types/workspace-index.js';\nimport type { OutputPort } from '../ports/output.js';\nimport { resolveOutput } from '../ports/resolve.js';\n\ninterface ProcessFileMappingsOptions {\n dryRun?: boolean;\n}\n\ninterface ProcessFileMappingsResult {\n removed: string[];\n updated: string[];\n}\n\nfunction isRootFileKey(key: string, rootNames: Set<string>): boolean {\n const normalized = 
key.replace(/\\\\/g, '/');\n return rootNames.has(normalized);\n}\n\nasync function processFileMappings(\n filesMapping: Record<string, (string | WorkspaceIndexFileMapping)[]>,\n targetDir: string,\n packageName: string,\n rootNames: Set<string>,\n options: ProcessFileMappingsOptions = {}\n): Promise<ProcessFileMappingsResult> {\n const removed: string[] = [];\n const updated: string[] = [];\n const seenPaths = new Set<string>();\n\n for (const [rawKey, mappings] of Object.entries(filesMapping || {})) {\n if (!Array.isArray(mappings) || mappings.length === 0) continue;\n\n const isDir = isDirKey(rawKey);\n\n if (isDir) {\n for (const mapping of mappings) {\n const targetPath = getTargetPath(mapping);\n const absDir = path.join(targetDir, targetPath);\n if (!(await exists(absDir))) continue;\n\n if (options.dryRun) {\n for await (const filePath of walkFiles(absDir)) {\n if (!seenPaths.has(filePath)) {\n seenPaths.add(filePath);\n removed.push(filePath);\n }\n }\n } else {\n const result = await removeFileMapping(targetDir, mapping, packageName);\n removed.push(...result.removed);\n updated.push(...result.updated);\n }\n }\n continue;\n }\n\n if (isRootFileKey(rawKey, rootNames)) {\n continue;\n }\n\n for (const mapping of mappings) {\n const targetPath = getTargetPath(mapping);\n const absPath = path.join(targetDir, targetPath);\n\n if (options.dryRun) {\n if (!seenPaths.has(absPath)) {\n seenPaths.add(absPath);\n removed.push(absPath);\n }\n } else {\n const result = await removeFileMapping(targetDir, mapping, packageName);\n removed.push(...result.removed);\n updated.push(...result.updated);\n }\n }\n }\n\n return { removed, updated };\n}\n\nexport interface UninstallPipelineResult {\n removedFiles: string[];\n rootFilesUpdated: string[];\n}\n\nexport async function runUninstallPipeline(\n packageName: string,\n options: UninstallOptions = {},\n execContext: ExecutionContext\n): Promise<CommandResult<UninstallPipelineResult>> {\n // Use targetDir for uninstall 
operations\n const targetDir = execContext.targetDir;\n const openpkgDir = getLocalOpenPackageDir(targetDir);\n const manifestPath = getLocalPackageYmlPath(targetDir);\n\n if (!(await exists(openpkgDir)) || !(await exists(manifestPath))) {\n throw new ValidationError(\n `No .openpackage/openpackage.yml found in ${targetDir}.`\n );\n }\n\n // Look up package with multi-strategy matching (exact, case-insensitive, normalized, resource name)\n const { index, path: indexPath } = await readWorkspaceIndex(targetDir);\n\n // Self-heal stale index entries before uninstall\n await healAndPersistIndex(targetDir, index, indexPath);\n\n const match = findPackageInIndex(packageName, index.packages || {});\n\n if (!match) {\n return { success: false, error: `Package '${packageName}' not found in workspace index.` };\n }\n\n const resolvedName = match.key;\n const pkgEntry = match.entry;\n\n // Check for embedded children\n const childKeys = getEmbeddedChildren(index.packages || {}, resolvedName);\n\n if (childKeys.length > 0 && !options.recursive) {\n // Warn about orphaned children\n const childList = childKeys.map(k => ` - ${k}`).join('\\n');\n logger.warn(\n `Package '${resolvedName}' has ${childKeys.length} embedded package(s):\\n${childList}\\n` +\n `Use --recursive to uninstall them together.`\n );\n }\n\n if (options.recursive && childKeys.length > 0) {\n const rootNames = getPlatformRootFileNames(getAllPlatforms(undefined, targetDir), targetDir);\n // Uninstall children first (reverse order for clean dependency removal)\n for (const childKey of childKeys.reverse()) {\n const childMatch = findPackageInIndex(childKey, index.packages || {});\n if (!childMatch) continue;\n\n const { removed: childDeleted } = await processFileMappings(\n childMatch.entry.files || {},\n targetDir,\n childMatch.key,\n rootNames,\n { dryRun: false }\n );\n\n await processRootFileRemovals(targetDir, [childMatch.key]);\n removeWorkspaceIndexEntry(index, childMatch.key);\n await 
removeLockfileEntry(targetDir, childMatch.key);\n await removePackageFromOpenpackageYml(targetDir, childMatch.key);\n\n logger.info(`Uninstalled embedded package ${childMatch.key}: removed ${childDeleted.length} files`);\n }\n }\n\n const rootNames = getPlatformRootFileNames(getAllPlatforms(undefined, targetDir), targetDir);\n\n if (options.dryRun) {\n const out = resolveOutput(execContext);\n const plannedRemovals = await processFileMappings(\n pkgEntry.files || {},\n targetDir,\n resolvedName,\n rootNames,\n { dryRun: true }\n );\n const rootPlan = await processRootFileRemovals(targetDir, [resolvedName], { dryRun: true });\n out.info(`(dry-run) Would remove ${plannedRemovals.removed.length} files for ${resolvedName}`);\n for (const filePath of plannedRemovals.removed) {\n out.info(` - ${filePath}`);\n }\n if (rootPlan.updated.length > 0) {\n out.info(`Root files to update:`);\n rootPlan.updated.forEach(f => out.info(` - ${f}`));\n }\n return {\n success: true,\n data: {\n removedFiles: plannedRemovals.removed,\n rootFilesUpdated: rootPlan.updated\n }\n };\n }\n\n const { removed: deleted, updated } = await processFileMappings(\n pkgEntry.files || {},\n targetDir,\n resolvedName,\n rootNames,\n { dryRun: false }\n );\n\n const rootResult = await processRootFileRemovals(targetDir, [resolvedName]);\n\n // Update workspace index (migration will happen on write)\n removeWorkspaceIndexEntry(index, resolvedName);\n await writeWorkspaceIndex({ path: indexPath, index });\n\n // Clean up lockfile entry\n await removeLockfileEntry(targetDir, resolvedName);\n\n // Update openpackage.yml (migration will happen on write)\n await removePackageFromOpenpackageYml(targetDir, resolvedName);\n\n // Cleanup empty directories (preserve platform roots from detection patterns)\n const preservedDirs = buildPreservedDirectoriesSet(targetDir);\n // Convert relative paths to absolute paths for cleanup\n const deletedAbsolutePaths = deleted.map(relativePath => path.join(targetDir, 
relativePath));\n await cleanupEmptyParents(targetDir, deletedAbsolutePaths, preservedDirs);\n\n logger.info(`Uninstalled ${resolvedName}: removed ${deleted.length} files, updated ${updated.length} merged files`);\n\n return {\n success: true,\n data: {\n removedFiles: deleted,\n rootFilesUpdated: [...rootResult.updated, ...updated]\n }\n };\n}\n\nexport async function runSelectiveUninstallPipeline(\n packageName: string,\n sourceKeysToRemove: Set<string>,\n options: UninstallOptions = {},\n execContext: ExecutionContext\n): Promise<CommandResult<UninstallPipelineResult>> {\n const targetDir = execContext.targetDir;\n const openpkgDir = getLocalOpenPackageDir(targetDir);\n const manifestPath = getLocalPackageYmlPath(targetDir);\n\n if (!(await exists(openpkgDir)) || !(await exists(manifestPath))) {\n throw new ValidationError(\n `No .openpackage/openpackage.yml found in ${targetDir}.`\n );\n }\n\n const { index, path: indexPath } = await readWorkspaceIndex(targetDir);\n const match = findPackageInIndex(packageName, index.packages || {});\n\n if (!match) {\n return { success: false, error: `Package '${packageName}' not found in workspace index.` };\n }\n\n const resolvedName = match.key;\n const pkgEntry = match.entry;\n\n const filteredFiles: Record<string, (string | WorkspaceIndexFileMapping)[]> = {};\n for (const key of sourceKeysToRemove) {\n if (pkgEntry.files[key]) {\n filteredFiles[key] = pkgEntry.files[key];\n }\n }\n\n const rootNames = getPlatformRootFileNames(getAllPlatforms(undefined, targetDir), targetDir);\n\n if (options.dryRun) {\n const out = resolveOutput(execContext);\n const plannedRemovals = await processFileMappings(\n filteredFiles,\n targetDir,\n resolvedName,\n rootNames,\n { dryRun: true }\n );\n out.info(`(dry-run) Would remove ${plannedRemovals.removed.length} files for ${resolvedName}`);\n for (const filePath of plannedRemovals.removed) {\n out.info(` - ${filePath}`);\n }\n return {\n success: true,\n data: {\n removedFiles: 
plannedRemovals.removed,\n rootFilesUpdated: []\n }\n };\n }\n\n const { removed: deleted, updated } = await processFileMappings(\n filteredFiles,\n targetDir,\n resolvedName,\n rootNames,\n { dryRun: false }\n );\n\n removeWorkspaceIndexFileKeys(index, resolvedName, sourceKeysToRemove);\n await writeWorkspaceIndex({ path: indexPath, index });\n\n // Clean lockfile if package has no files remaining\n const updatedEntry = index.packages[resolvedName];\n if (!updatedEntry || Object.keys(updatedEntry.files ?? {}).length === 0) {\n await removeLockfileEntry(targetDir, resolvedName);\n }\n\n const preservedDirs = buildPreservedDirectoriesSet(targetDir);\n const deletedAbsolutePaths = deleted.map(relativePath => path.join(targetDir, relativePath));\n await cleanupEmptyParents(targetDir, deletedAbsolutePaths, preservedDirs);\n\n logger.info(`Selectively uninstalled from ${resolvedName}: removed ${deleted.length} files, updated ${updated.length} merged files`);\n\n return {\n success: true,\n data: {\n removedFiles: deleted,\n rootFilesUpdated: updated\n }\n };\n}\n", "/**\n * Workspace Index Healer\n *\n * Validates workspace index entries against the filesystem and removes\n * stale mappings (files that no longer exist on disk). This self-healing\n * prevents commands from failing on phantom file references.\n */\n\nimport path from 'path';\n\nimport { exists } from './fs.js';\nimport { getTargetPath } from './workspace-index-helpers.js';\nimport { writeWorkspaceIndex, getWorkspaceIndexPath } from './workspace-index-yml.js';\nimport type { WorkspaceIndex } from '../types/workspace-index.js';\nimport { logger } from './logger.js';\n\nexport interface HealResult {\n healed: boolean;\n removedMappings: number;\n}\n\n/**\n * Validate workspace index file mappings against disk and remove stale entries.\n *\n * For each package, iterates file mappings and checks if target files exist.\n * Removes mappings whose targets are missing. 
If a source key's entire target\n * array becomes empty, deletes the source key. Preserves package entries even\n * if all files are gone (keeps path/version metadata for reinstall hints).\n *\n * Mutates the index in place and returns whether any healing occurred.\n *\n * @param targetDir - Workspace root directory\n * @param index - Workspace index (mutated in place)\n * @param packageName - If specified, only heal this package's mappings\n */\nexport async function validateAndHeal(\n targetDir: string,\n index: WorkspaceIndex,\n packageName?: string,\n): Promise<HealResult> {\n let removedMappings = 0;\n\n const packages = packageName\n ? (index.packages[packageName] ? { [packageName]: index.packages[packageName] } : {})\n : index.packages;\n\n for (const pkg of Object.values(packages)) {\n if (!pkg.files) continue;\n\n for (const [sourceKey, targets] of Object.entries(pkg.files)) {\n if (!Array.isArray(targets) || targets.length === 0) continue;\n\n const kept = [];\n for (const mapping of targets) {\n const targetPath = getTargetPath(mapping);\n const absTarget = path.join(targetDir, targetPath);\n if (await exists(absTarget)) {\n kept.push(mapping);\n } else {\n removedMappings++;\n logger.debug(`Healed stale mapping: ${targetPath} (file missing from disk)`);\n }\n }\n\n if (kept.length === 0) {\n delete pkg.files[sourceKey];\n } else if (kept.length < targets.length) {\n pkg.files[sourceKey] = kept;\n }\n }\n }\n\n return { healed: removedMappings > 0, removedMappings };\n}\n\n/**\n * Convenience: validate, heal, and persist the index in one call.\n * Best-effort \u2014 logs and swallows errors so callers don't need try/catch.\n *\n * @returns The heal result, or `{ healed: false, removedMappings: 0 }` on error.\n */\nexport async function healAndPersistIndex(\n targetDir: string,\n index: WorkspaceIndex,\n indexPath: string,\n packageName?: string,\n): Promise<HealResult> {\n try {\n const result = await validateAndHeal(targetDir, index, packageName);\n if 
(result.healed) {\n await writeWorkspaceIndex({ path: indexPath, index });\n }\n return result;\n } catch (error) {\n logger.warn(`Index healing failed: ${error}`);\n return { healed: false, removedMappings: 0 };\n }\n}\n", "import { join } from 'path';\nimport { getTargetPath } from './workspace-index-helpers.js';\n\nimport type { WorkspaceIndex, WorkspaceIndexPackage } from '../types/workspace-index.js';\nimport { normalizePathForProcessing } from './path-normalization.js';\nimport { exists, walkFiles } from './fs.js';\n\nexport interface WorkspaceConflictOwner {\n packageName: string;\n key: string;\n type: 'file' | 'dir';\n}\n\nexport interface WorkspaceOwnershipContext {\n /**\n * Directory-key owners (key ends with \"/\").\n * Each key may have multiple owners, though consumers typically expect none or one.\n */\n dirKeyOwners: Map<string, WorkspaceConflictOwner[]>;\n /**\n * Concrete workspace path \u2192 owning package.\n */\n installedPathOwners: Map<string, WorkspaceConflictOwner>;\n}\n\nexport function getWorkspaceIndexEntry(\n index: WorkspaceIndex,\n packageName: string\n): WorkspaceIndexPackage | undefined {\n return index.packages?.[packageName];\n}\n\nexport function upsertWorkspaceIndexEntry(\n index: WorkspaceIndex,\n packageName: string,\n entry: WorkspaceIndexPackage\n): void {\n if (!index.packages) {\n index.packages = {};\n }\n index.packages[packageName] = entry;\n}\n\nexport function removeWorkspaceIndexEntry(index: WorkspaceIndex, packageName: string): void {\n if (index.packages && index.packages[packageName]) {\n delete index.packages[packageName];\n }\n}\n\nexport function removeWorkspaceIndexFileKeys(\n index: WorkspaceIndex,\n packageName: string,\n sourceKeysToRemove: Set<string>\n): void {\n const pkg = index.packages?.[packageName];\n if (!pkg) return;\n\n for (const key of sourceKeysToRemove) {\n delete pkg.files[key];\n }\n\n if (Object.keys(pkg.files).length === 0) {\n delete index.packages[packageName];\n }\n}\n\n/**\n * Build 
ownership maps from the unified workspace index.\n *\n * - Directory keys (trailing \"/\") are expanded to the concrete file paths that\n * currently exist on disk under the mapped directories.\n * - File keys map directly to the listed workspace-relative paths.\n */\nexport async function buildWorkspaceOwnershipContext(\n cwd: string,\n index: WorkspaceIndex,\n opts: { excludePackage?: string } = {}\n): Promise<WorkspaceOwnershipContext> {\n const dirKeyOwners = new Map<string, WorkspaceConflictOwner[]>();\n const installedPathOwners = new Map<string, WorkspaceConflictOwner>();\n\n const packages = index.packages ?? {};\n const exclude = opts.excludePackage;\n\n for (const [rawName, pkg] of Object.entries(packages)) {\n if (exclude && rawName === exclude) continue;\n if (!pkg || typeof pkg !== 'object') continue;\n\n const pkgName = rawName;\n const files = pkg.files ?? {};\n\n for (const [rawKey, rawValues] of Object.entries(files)) {\n if (!Array.isArray(rawValues)) continue;\n const normalizedKey = normalizePathForProcessing(rawKey);\n if (!normalizedKey) continue;\n\n const owner: WorkspaceConflictOwner = {\n packageName: pkgName,\n key: normalizedKey,\n type: normalizedKey.endsWith('/') ? 'dir' : 'file'\n };\n\n if (owner.type === 'dir') {\n if (!dirKeyOwners.has(normalizedKey)) {\n dirKeyOwners.set(normalizedKey, []);\n }\n dirKeyOwners.get(normalizedKey)!.push(owner);\n\n for (const rawMapping of rawValues) {\n // Handle both simple string and WorkspaceIndexFileMapping\n const targetPath = typeof rawMapping === 'string' ? 
rawMapping : rawMapping.target;\n const dirRel = normalizePathForProcessing(targetPath);\n if (!dirRel) continue;\n const expanded = await collectFilesUnderDirectory(cwd, dirRel);\n for (const relFile of expanded) {\n if (!installedPathOwners.has(relFile)) {\n installedPathOwners.set(relFile, owner);\n }\n }\n }\n continue;\n }\n\n // file key\n for (const rawMapping of rawValues) {\n // Handle both simple string and WorkspaceIndexFileMapping\n const targetPath = typeof rawMapping === 'string' ? rawMapping : rawMapping.target;\n const relPath = normalizePathForProcessing(targetPath);\n if (!relPath) continue;\n if (!installedPathOwners.has(relPath)) {\n installedPathOwners.set(relPath, owner);\n }\n }\n }\n }\n\n return { dirKeyOwners, installedPathOwners };\n}\n\nasync function collectFilesUnderDirectory(cwd: string, dirRel: string): Promise<string[]> {\n const normalizedDir = normalizePathForProcessing(dirRel);\n if (!normalizedDir) return [];\n\n const absDir = join(cwd, normalizedDir);\n if (!(await exists(absDir))) {\n return [];\n }\n\n const collected: string[] = [];\n for await (const absFile of walkFiles(absDir)) {\n const rel = normalizePathForProcessing(absFile.slice(absDir.length + 1));\n if (rel) {\n collected.push(normalizePathForProcessing(join(normalizedDir, rel)));\n }\n }\n return collected;\n}\n", "/**\n * Root File Uninstaller\n * Utilities to remove package-marked sections from root files and delete empty files\n */\n\nimport { join } from 'path';\nimport { exists, readTextFile, writeTextFile } from '../../utils/fs.js';\nimport { logger } from '../../utils/logger.js';\nimport { getAllPlatforms, getPlatformDefinition } from '../platforms.js';\nimport { buildOpenMarkerRegex, CLOSE_MARKER_REGEX } from '../../utils/root-file-extractor.js';\nimport { FILE_PATTERNS } from '../../constants/index.js';\n\n/** Remove a single package section from root-file content using markers */\nfunction stripPackageSection(content: string, packageName: string): { 
changed: boolean; content: string } {\n if (!content) return { changed: false, content };\n const openRe = buildOpenMarkerRegex(packageName);\n const closeRe = CLOSE_MARKER_REGEX;\n const openMatch = openRe.exec(content);\n if (!openMatch) return { changed: false, content };\n const before = content.slice(0, openMatch.index);\n const rest = content.slice(openMatch.index + openMatch[0].length);\n const closeMatch = closeRe.exec(rest);\n if (!closeMatch) return { changed: false, content };\n const after = rest.slice(closeMatch.index + closeMatch[0].length);\n return { changed: true, content: before + after };\n}\n\n/** Remove multiple package sections from content */\nfunction stripMultiplePackageSections(content: string, packageNames: string[]): { changed: boolean; content: string } {\n let changed = false;\n let current = content;\n for (const name of packageNames) {\n const result = stripPackageSection(current, name);\n if (result.changed) changed = true;\n current = result.content;\n }\n return { changed, content: current };\n}\n\n/** Discover platform root filenames from platform definitions */\nfunction getUniqueRootFilenames(): string[] {\n const set = new Set<string>([FILE_PATTERNS.AGENTS_MD]);\n for (const platform of getAllPlatforms()) {\n const def = getPlatformDefinition(platform);\n if (def.rootFile) set.add(def.rootFile);\n }\n return Array.from(set);\n}\n\n/**\n * Process root file removals: strip package sections from root files.\n * When dryRun is true, returns the list of files that would be updated without writing.\n * When dryRun is false (default), writes the changes and returns the updated files.\n */\nexport async function processRootFileRemovals(\n targetDir: string,\n packageNames: string[],\n options: { dryRun?: boolean } = {}\n): Promise<{ updated: string[] }> {\n const updated: string[] = [];\n const rootFiles = getUniqueRootFilenames();\n\n for (const filename of rootFiles) {\n const absPath = join(targetDir, filename);\n if (!(await 
exists(absPath))) continue;\n\n const original = await readTextFile(absPath);\n const { changed, content } = stripMultiplePackageSections(original, packageNames);\n if (!changed) continue;\n\n if (!options.dryRun) {\n await writeTextFile(absPath, content);\n logger.debug(`Updated root file: ${absPath}`);\n }\n\n updated.push(filename);\n }\n\n return { updated };\n}\n", "/**\n * Subsumption Resolver\n *\n * Detects and resolves overlapping installations between resource-scoped\n * installs (e.g., gh@user/repo/agents/agent1) and full-package installs\n * (e.g., gh@user/repo) from the same source.\n *\n * Two scenarios:\n * 1. Resource installed first, then full package -> auto-replace resource entry\n * 2. Full package installed first, then resource -> skip (already covered)\n */\n\nimport type { PackageSource } from '../unified/context.js';\nimport type { WorkspaceIndex, WorkspaceIndexPackage } from '../../../types/workspace-index.js';\nimport { readWorkspaceIndex } from '../../../utils/workspace-index-yml.js';\nimport { normalizePackageName } from '../../../utils/package-name.js';\nimport { normalizeGitUrl } from '../../../utils/git-url-parser.js';\nimport type { ExecutionContext } from '../../../types/execution-context.js';\nimport { runUninstallPipeline } from '../../uninstall/uninstall-pipeline.js';\n\n// ============================================================================\n// Types\n// ============================================================================\n\nexport type SubsumptionResult =\n | SubsumptionNone\n | SubsumptionUpgrade\n | SubsumptionAlreadyCovered;\n\nexport interface SubsumptionNone {\n type: 'none';\n}\n\nexport interface SubsumptionUpgrade {\n type: 'upgrade';\n /** Resource-scoped entries that will be replaced by the full package */\n entriesToRemove: SubsumedEntry[];\n}\n\nexport interface SubsumptionAlreadyCovered {\n type: 'already-covered';\n /** The full-package name that already covers this resource */\n coveringPackage: 
string;\n}\n\nexport interface SubsumedEntry {\n /** Package name as recorded in the workspace index (e.g., gh@user/repo/agents/agent1) */\n packageName: string;\n}\n\n// ============================================================================\n// Detection\n// ============================================================================\n\n/**\n * Extract the source identity (normalized git URL or package name) and the\n * resource suffix from an installation context's source and package name.\n * \n * Returns null if the source type is not supported for subsumption.\n */\nexport function extractSourceIdentity(source: PackageSource): {\n /** Canonical source identifier (normalized git URL or registry name) */\n sourceKey: string;\n /** The gh@owner/repo prefix (for git) or package name (for registry/path) */\n basePackageName: string;\n /** Whether this install targets a specific resource within the package */\n isResourceScoped: boolean;\n} | null {\n const normalizedName = normalizePackageName(source.packageName);\n\n if (source.type === 'git' && source.gitUrl) {\n const normalizedUrl = normalizeGitUrl(source.gitUrl);\n // For git sources, the \"base\" package name is gh@owner/repo\n // A resource-scoped install has a longer name like gh@owner/repo/agents/agent1\n const ghMatch = normalizedName.match(/^gh@([^/]+\\/[^/]+)/);\n const basePackageName = ghMatch ? 
`gh@${ghMatch[1]}` : normalizedName;\n const isResourceScoped = Boolean(source.resourcePath) ||\n (normalizedName !== basePackageName);\n\n return {\n sourceKey: normalizedUrl,\n basePackageName,\n isResourceScoped\n };\n }\n\n if (source.type === 'path' && source.localPath) {\n // For path sources, the source key is the absolute path\n // Resource scoping is indicated by resourcePath\n const isResourceScoped = Boolean(source.resourcePath);\n return {\n sourceKey: `path:${source.localPath}`,\n basePackageName: normalizedName,\n isResourceScoped\n };\n }\n\n if (source.type === 'registry') {\n // Registry sources: the sourceKey is the normalized package name\n const isResourceScoped = Boolean(source.resourcePath || source.registryPath);\n return {\n sourceKey: `registry:${normalizedName}`,\n basePackageName: normalizedName,\n isResourceScoped\n };\n }\n\n return null;\n}\n\n/**\n * Determine if an existing workspace index entry belongs to the same source\n * as the incoming install. This matches based on:\n * - Git: normalized URL from the cached path pattern\n * - Path: absolute path prefix\n * - Registry: package name prefix\n * \n * For git sources, two entries share a source if the workspace index `path`\n * field points into the same git cache directory (same URL hash + commit hash).\n */\nfunction entrySameSource(\n existingName: string,\n existingEntry: WorkspaceIndexPackage,\n sourceKey: string,\n basePackageName: string,\n sourceType: PackageSource['type']\n): boolean {\n const normalizedExistingName = normalizePackageName(existingName);\n const normalizedBase = normalizePackageName(basePackageName);\n\n if (sourceType === 'git') {\n // For git sources, check if names share the same gh@owner/repo prefix\n // e.g., gh@user/repo and gh@user/repo/agents/agent1\n if (normalizedExistingName.startsWith(normalizedBase + '/') ||\n normalizedExistingName === normalizedBase) {\n return true;\n }\n // Also check if the base name is a prefix of the existing name\n if 
(normalizedBase.startsWith(normalizedExistingName + '/') ||\n normalizedBase === normalizedExistingName) {\n return true;\n }\n return false;\n }\n\n if (sourceType === 'path') {\n // For path sources, entries from the same local directory\n return normalizedExistingName === normalizedBase ||\n normalizedExistingName.startsWith(normalizedBase + '/') ||\n normalizedBase.startsWith(normalizedExistingName + '/');\n }\n\n if (sourceType === 'registry') {\n return normalizedExistingName === normalizedBase ||\n normalizedExistingName.startsWith(normalizedBase + '/') ||\n normalizedBase.startsWith(normalizedExistingName + '/');\n }\n\n return false;\n}\n\n/**\n * Check for subsumption between the incoming install and existing entries.\n * \n * Uses a bidirectional approach: instead of guessing whether the incoming\n * install is \"full\" or \"resource-scoped\" (which breaks for marketplace plugins\n * at subpaths like gh@owner/repo/plugins/feature-dev), we check BOTH\n * directions purely based on package name prefix relationships:\n * \n * 1. Upgrade: existing entries whose names start with incoming + '/' are\n * resource-scoped children of the incoming install \u2192 remove them.\n * 2. Already-covered: an existing entry whose name is a prefix of incoming\n * means the incoming resource is already covered \u2192 skip.\n * \n * @param source - The PackageSource for the incoming install\n * @param targetDir - The workspace target directory\n * @returns SubsumptionResult describing what action to take\n */\nexport async function checkSubsumption(\n source: PackageSource,\n targetDir: string\n): Promise<SubsumptionResult> {\n const identity = extractSourceIdentity(source);\n if (!identity) {\n return { type: 'none' };\n }\n\n const { sourceKey, basePackageName } = identity;\n\n // Read current workspace index\n const wsRecord = await readWorkspaceIndex(targetDir);\n const packages = wsRecord.index.packages ?? 
{};\n\n if (Object.keys(packages).length === 0) {\n return { type: 'none' };\n }\n\n const normalizedIncoming = normalizePackageName(source.packageName);\n\n // -------------------------------------------------------------------\n // Direction 1 (upgrade): Are there existing entries whose names start\n // with the incoming name + '/'? If so, the incoming install subsumes\n // those more-specific (resource-scoped) entries.\n // -------------------------------------------------------------------\n const entriesToRemove: SubsumedEntry[] = [];\n\n for (const existingName of Object.keys(packages)) {\n if (normalizePackageName(existingName) === normalizedIncoming) {\n // Same package name -- not a subsumption, just a reinstall/update\n continue;\n }\n\n if (entrySameSource(existingName, packages[existingName], sourceKey, basePackageName, source.type)) {\n const normalizedExisting = normalizePackageName(existingName);\n if (normalizedExisting.startsWith(normalizedIncoming + '/')) {\n entriesToRemove.push({ packageName: existingName });\n }\n }\n }\n\n if (entriesToRemove.length > 0) {\n return { type: 'upgrade', entriesToRemove };\n }\n\n // -------------------------------------------------------------------\n // Direction 2 (already-covered): Is there an existing entry whose name\n // is a strict prefix of the incoming name? 
If so, the incoming resource\n // is already installed via that broader package \u2192 skip.\n // -------------------------------------------------------------------\n for (const existingName of Object.keys(packages)) {\n if (!entrySameSource(existingName, packages[existingName], sourceKey, basePackageName, source.type)) {\n continue;\n }\n\n const normalizedExisting = normalizePackageName(existingName);\n if (normalizedIncoming.startsWith(normalizedExisting + '/')) {\n return {\n type: 'already-covered',\n coveringPackage: existingName\n };\n }\n }\n\n return { type: 'none' };\n}\n\n// ============================================================================\n// Resolution\n// ============================================================================\n\n/**\n * Resolve a subsumption upgrade by removing subsumed entries via the uninstall\n * pipeline. This ensures correct handling of merged files (key removal), root\n * files, and directory cleanup, so the subsequent full-package install does\n * not hit exists-unowned conflicts (which would trigger namespacing instead\n * of installing to canonical paths).\n *\n * @param result - The upgrade subsumption result\n * @param execContext - Execution context (targetDir, etc.) for uninstall operations\n */\nexport async function resolveSubsumption(\n result: SubsumptionUpgrade,\n execContext: ExecutionContext,\n): Promise<void> {\n for (const entry of result.entriesToRemove) {\n const uninstallResult = await runUninstallPipeline(entry.packageName, {}, execContext);\n\n if (!uninstallResult.success) {\n throw new Error(\n `Failed to remove subsumed package ${entry.packageName}: ${uninstallResult.error ?? 
'unknown error'}`\n );\n }\n }\n}\n", "/**\n * Sync Version Checker\n *\n * Central module for version constraint checking during sync/install.\n * Reads source and consumer versions, validates constraints, and\n * updates manifest ranges and workspace index versions.\n */\n\nimport { join } from 'path';\nimport { readLockfile, writeLockfile } from '../../utils/lockfile-yml.js';\nimport { parsePackageYml } from '../../utils/package-yml.js';\nimport { getLocalPackageYmlPath } from '../../utils/paths.js';\nimport { arePackageNamesEquivalent, normalizePackageName } from '../../utils/package-name.js';\nimport { satisfiesVersion, createCaretRange } from '../../utils/version-ranges.js';\nimport { extractBaseVersion } from '../../utils/version-generator.js';\nimport { isUnversionedVersion } from '../package-versioning.js';\nimport { addPackageToYml } from '../package-management.js';\nimport { readWorkspaceIndex, writeWorkspaceIndex } from '../../utils/workspace-index-yml.js';\nimport { NonInteractivePromptError } from '../ports/console-prompt.js';\nimport { resolveVersionMismatchInteractively } from './sync-version-resolver.js';\nimport type { PromptPort } from '../ports/prompt.js';\nimport { logger } from '../../utils/logger.js';\n\n// ---------------------------------------------------------------------------\n// Types\n// ---------------------------------------------------------------------------\n\nexport type VersionCheckStatus = 'unconstrained' | 'satisfied' | 'mismatch';\n\nexport interface VersionCheckResult {\n status: VersionCheckStatus;\n sourceVersion?: string;\n manifestRange?: string;\n suggestedRange?: string;\n}\n\nexport interface VersionUpdateInfo {\n oldVersion?: string;\n newVersion: string;\n oldRange?: string;\n newRange?: string;\n}\n\nexport type VersionResolutionOutcome =\n | { action: 'proceed'; update: VersionUpdateInfo }\n | { action: 'skip' }\n | { action: 'none' };\n\n// 
---------------------------------------------------------------------------\n// Read source package version\n// ---------------------------------------------------------------------------\n\n/**\n * Read the source package's current version from its openpackage.yml.\n * Tries root `openpackage.yml`, then `.openpackage/openpackage.yml`.\n */\nexport async function readSourcePackageVersion(packageRoot: string): Promise<string | undefined> {\n const candidates = [\n join(packageRoot, 'openpackage.yml'),\n join(packageRoot, '.openpackage', 'openpackage.yml'),\n ];\n\n for (const candidate of candidates) {\n try {\n const yml = await parsePackageYml(candidate);\n return yml.version;\n } catch {\n // File missing or parse error \u2014 try next candidate\n }\n }\n\n return undefined;\n}\n\n// ---------------------------------------------------------------------------\n// Read manifest range for a dependency\n// ---------------------------------------------------------------------------\n\n/**\n * Read the consumer workspace's manifest range for a specific dependency.\n * Searches both `dependencies` and `dev-dependencies`.\n * Returns `undefined` if no manifest, no dep entry, or no version field.\n */\nexport async function readManifestRangeForDependency(\n cwd: string,\n packageName: string,\n): Promise<string | undefined> {\n const manifestPath = getLocalPackageYmlPath(cwd);\n\n try {\n const config = await parsePackageYml(manifestPath);\n const normalized = normalizePackageName(packageName);\n\n const allDeps = [\n ...(config.dependencies ?? []),\n ...(config['dev-dependencies'] ?? 
[]),\n ];\n\n for (const dep of allDeps) {\n if (arePackageNamesEquivalent(dep.name, normalized)) {\n return dep.version;\n }\n }\n\n return undefined;\n } catch {\n return undefined;\n }\n}\n\n// ---------------------------------------------------------------------------\n// Check version constraint (pure)\n// ---------------------------------------------------------------------------\n\n/**\n * Pure validation: does the source version satisfy the manifest range?\n *\n * Unconstrained when either side is undefined or unversioned.\n */\nexport function checkVersionConstraint(\n sourceVersion: string | undefined,\n manifestRange: string | undefined,\n): VersionCheckResult {\n // Unconstrained: no source version or unversioned\n if (!sourceVersion || isUnversionedVersion(sourceVersion)) {\n return { status: 'unconstrained', sourceVersion, manifestRange };\n }\n\n // Unconstrained: no manifest range\n if (!manifestRange) {\n return { status: 'unconstrained', sourceVersion, manifestRange };\n }\n\n // Check satisfaction\n if (satisfiesVersion(sourceVersion, manifestRange)) {\n return { status: 'satisfied', sourceVersion, manifestRange };\n }\n\n // Mismatch \u2014 compute suggested range\n const baseVersion = extractBaseVersion(sourceVersion);\n const suggestedRange = createCaretRange(baseVersion);\n\n return {\n status: 'mismatch',\n sourceVersion,\n manifestRange,\n suggestedRange,\n };\n}\n\n// ---------------------------------------------------------------------------\n// Resolve version mismatch (shared by sync and install)\n// ---------------------------------------------------------------------------\n\n/**\n * Resolve a version mismatch through the decision cascade:\n * 1. versionOverride \u2192 use it as new range\n * 2. force \u2192 auto-generate caret range\n * 3. interactive \u2192 prompt user\n * 4. 
non-interactive \u2192 throw with actionable error\n *\n * Returns the new range to use, or 'skip' if user chose to skip.\n */\nexport async function resolveVersionMismatch(\n packageName: string,\n check: VersionCheckResult,\n options: { versionOverride?: string; force?: boolean },\n prompt: PromptPort,\n commandName: string,\n): Promise<{ action: 'update'; newRange: string } | { action: 'skip' }> {\n // 1. Version override from @<range> notation\n if (options.versionOverride) {\n return { action: 'update', newRange: options.versionOverride };\n }\n\n // 2. Force flag \u2192 auto-generate caret range\n if (options.force) {\n const baseVersion = extractBaseVersion(check.sourceVersion!);\n const newRange = createCaretRange(baseVersion);\n return { action: 'update', newRange };\n }\n\n // 3. Interactive resolution (throws NonInteractivePromptError if non-TTY)\n try {\n return await resolveVersionMismatchInteractively(packageName, check, prompt);\n } catch (error) {\n // 4. Non-interactive \u2192 actionable error\n if (error instanceof NonInteractivePromptError) {\n const suggested = check.suggestedRange ?? 
`^${check.sourceVersion}`;\n throw new Error(\n `Version mismatch: source '${packageName}' is at ${check.sourceVersion} ` +\n `but manifest requires ${check.manifestRange}.\\n\\n` +\n `To resolve, use one of:\\n` +\n ` opkg ${commandName} ${packageName}@${suggested} (update manifest range)\\n` +\n ` opkg ${commandName} --force (auto-update to caret range)\\n`\n );\n }\n throw error;\n }\n}\n\n// ---------------------------------------------------------------------------\n// Update manifest range\n// ---------------------------------------------------------------------------\n\n/**\n * Update the consumer's manifest with a new version range for a dependency.\n * Preserves the dependency's current location (dependencies vs dev-dependencies).\n */\nexport async function updateManifestRange(\n cwd: string,\n packageName: string,\n sourceVersion: string,\n newRange: string,\n): Promise<void> {\n await addPackageToYml(\n cwd,\n packageName,\n sourceVersion,\n /* isDev */ false,\n /* originalVersion */ newRange,\n /* silent */ true,\n );\n}\n\n// ---------------------------------------------------------------------------\n// Update index version\n// ---------------------------------------------------------------------------\n\n/**\n * Update the workspace index `version` field for a package.\n * Same read-modify-write pattern as updatePullHashesFromPipeline.\n */\nexport async function updateIndexVersion(\n cwd: string,\n packageName: string,\n newVersion: string,\n): Promise<void> {\n try {\n const record = await readWorkspaceIndex(cwd);\n const pkg = record.index.packages?.[packageName];\n if (!pkg) return;\n\n pkg.version = newVersion;\n await writeWorkspaceIndex(record);\n logger.debug(`Updated workspace index version for ${packageName} to ${newVersion}`);\n\n // Update lockfile version too (best-effort)\n try {\n const lockRecord = await readLockfile(cwd);\n const lockPkg = lockRecord.lockfile.packages[packageName];\n if (lockPkg) {\n lockPkg.version = newVersion;\n 
await writeLockfile(lockRecord);\n }\n } catch { /* lockfile update is best-effort */ }\n } catch (error) {\n logger.warn(`Failed to update index version: ${error}`);\n }\n}\n", "/**\n * Sync Version Resolver\n *\n * Interactive version mismatch resolution for human mode.\n * Mirrors sync-conflict-resolver.ts structure.\n */\n\nimport type { PromptPort } from '../ports/prompt.js';\nimport type { VersionCheckResult } from './sync-version-checker.js';\n\n// ---------------------------------------------------------------------------\n// Types\n// ---------------------------------------------------------------------------\n\nexport type VersionResolution =\n | { action: 'update'; newRange: string }\n | { action: 'skip' };\n\n// ---------------------------------------------------------------------------\n// Interactive resolver\n// ---------------------------------------------------------------------------\n\n/**\n * Prompt the user to resolve a version mismatch interactively.\n *\n * Offers three choices:\n * 1. Update manifest to suggested caret range\n * 2. Enter a custom range\n * 3. Skip (abort sync for this package)\n */\nexport async function resolveVersionMismatchInteractively(\n packageName: string,\n check: VersionCheckResult,\n prompt: PromptPort,\n): Promise<VersionResolution> {\n const suggested = check.suggestedRange ?? 
`^${check.sourceVersion}`;\n\n const choice = await prompt.select<'update' | 'custom' | 'skip'>(\n `Version mismatch for ${packageName}: source is ${check.sourceVersion} but manifest requires ${check.manifestRange}`,\n [\n { title: `Update manifest range to ${suggested}`, value: 'update' },\n { title: 'Enter a custom version range', value: 'custom' },\n { title: 'Skip this package', value: 'skip' },\n ],\n );\n\n if (choice === 'update') {\n return { action: 'update', newRange: suggested };\n }\n\n if (choice === 'custom') {\n const customRange = await prompt.text('Enter version range:', {\n placeholder: suggested,\n initial: suggested,\n });\n return { action: 'update', newRange: customRange };\n }\n\n return { action: 'skip' };\n}\n", "import type { CommandResult } from '../../../types/index.js';\nimport type { InstallationContext } from './context.js';\nimport type { UnifiedSpinner } from '../../ports/output.js';\nimport { loadPackagePhase } from './phases/load-package.js';\nimport { convertPhase } from './phases/convert.js';\nimport { processConflictsPhase } from './phases/conflicts.js';\nimport { executeInstallationPhase } from './phases/execute.js';\nimport { updateManifestPhase } from './phases/manifest.js';\nimport { reportResultsPhase } from './phases/report.js';\nimport { shouldUpdateManifest } from './context-helpers.js';\nimport { logger } from '../../../utils/logger.js';\nimport { createWorkspacePackageYml } from '../../package-management.js';\nimport { cleanupTempDirectory } from '../strategies/helpers/temp-directory.js';\nimport { resolveOutput, resolvePrompt } from '../../ports/resolve.js';\nimport { checkSubsumption, resolveSubsumption } from '../orchestrator/subsumption-resolver.js';\nimport {\n readManifestRangeForDependency,\n checkVersionConstraint,\n resolveVersionMismatch,\n updateManifestRange,\n} from '../../sync/sync-version-checker.js';\n\nfunction assertPipelineContextComplete(ctx: InstallationContext): void {\n if (!ctx.source.type) {\n 
throw new Error('Pipeline context invalid: ctx.source.type is required');\n }\n if (!ctx.source.packageName) {\n throw new Error('Pipeline context invalid: ctx.source.packageName must be set after load phase');\n }\n if (!ctx.source.contentRoot) {\n throw new Error('Pipeline context invalid: ctx.source.contentRoot must be set after load phase');\n }\n if (!Array.isArray(ctx.resolvedPackages) || ctx.resolvedPackages.length === 0) {\n throw new Error('Pipeline context invalid: ctx.resolvedPackages must contain a root package after load phase');\n }\n if (!ctx.resolvedPackages.some(p => (p as any).isRoot)) {\n throw new Error('Pipeline context invalid: ctx.resolvedPackages must contain an isRoot package');\n }\n}\n\n/**\n * Subsumption phase: detect and resolve overlapping installations.\n *\n * Runs after the load phase (so packageName is populated) and before convert.\n * Returns 'skip' when the incoming install is already covered by a broader\n * package, 'proceed' otherwise (including after resolving upgrade scenarios).\n *\n * Skipped when:\n * - force flag is set (user explicitly wants to reinstall)\n * - _subsumptionChecked is true (already filtered by runMultiContextPipeline)\n *\n * Exported for testability.\n */\nexport async function subsumptionPhase(ctx: InstallationContext): Promise<'proceed' | 'skip'> {\n if (ctx.options?.force || ctx._subsumptionChecked) {\n return 'proceed';\n }\n\n const result = await checkSubsumption(ctx.source, ctx.targetDir);\n\n switch (result.type) {\n case 'upgrade':\n await resolveSubsumption(result, ctx.execution);\n ctx._replacedResources = result.entriesToRemove.map(e => e.packageName);\n return 'proceed';\n\n case 'already-covered': {\n const out = resolveOutput(ctx.execution);\n const resourcePath = ctx.source.resourcePath ||\n ctx.source.packageName.replace(/^.*?\\/[^/]+\\/[^/]+\\//, '');\n out.info(`Skipped: ${resourcePath} is already installed via ${result.coveringPackage}`);\n return 'skip';\n }\n\n case 'none':\n 
default:\n return 'proceed';\n }\n}\n\n/**\n * Unified installation pipeline\n * \n * Handles all installation scenarios (install, apply, bulk) with conditional phase execution\n * based on the context mode.\n * \n * @param ctx - Installation context\n * @returns Command result\n */\nexport async function runUnifiedInstallPipeline(\n ctx: InstallationContext\n): Promise<CommandResult> {\n logger.info(`Starting unified installation pipeline`, {\n mode: ctx.mode,\n sourceType: ctx.source.type,\n packageName: ctx.source.packageName\n });\n \n const out = resolveOutput(ctx.execution);\n const displayName = ctx.source.packageName || 'package';\n let tempConversionRoot: string | null = null;\n\n // Spinner 1: covers phases 0\u20132 (workspace manifest, load, subsumption, convert).\n // Stopped before the conflict phase which may prompt the user.\n let spinner1: UnifiedSpinner | undefined;\n try {\n spinner1 = out.spinner();\n spinner1.start(`Installing ${displayName}`);\n\n // Phase 0: Ensure workspace manifest exists (auto-create if needed)\n // Only for install mode, not apply mode (apply requires existing installation)\n if (ctx.mode === 'install') {\n await createWorkspacePackageYml(ctx.targetDir);\n }\n\n // Phase 1: Load package from source (always)\n // Pass spinner1 so loadPackagePhase updates our message instead of creating its own.\n await loadPackagePhase(ctx, undefined, spinner1);\n\n // Assert context is complete after load phase\n assertPipelineContextComplete(ctx);\n\n // Phase 1.5: Subsumption \u2014 detect overlapping installations.\n // Must run after load (packageName is now set) and before convert (avoid wasted work).\n const subsumptionOutcome = await subsumptionPhase(ctx);\n if (subsumptionOutcome === 'skip') {\n spinner1.stop();\n return createAlreadyCoveredResult(ctx);\n }\n\n // Phase 1.7: Version constraint validation (mutable sources only).\n // Stop spinner before potential interactive prompt, restart after.\n if (ctx.source.type === 'path' || 
ctx.source.type === 'workspace') {\n spinner1.stop();\n await versionValidationPhase(ctx);\n spinner1 = out.spinner();\n spinner1.start(`Preparing ${ctx.source.packageName}`);\n }\n\n // Phase 2: Convert package format if needed.\n // After load phase, ctx.source.packageName is guaranteed set by assertPipelineContextComplete.\n spinner1.message(`Preparing ${ctx.source.packageName}`);\n await convertPhase(ctx);\n\n tempConversionRoot = ctx._tempConversionRoot ?? null;\n\n spinner1.stop(`Prepared ${ctx.source.packageName}`);\n } catch (error) {\n spinner1?.stop();\n // convertPhase may have created a temp dir before failing\n await cleanupTempDirectory(ctx._tempConversionRoot ?? null);\n throw error;\n }\n\n // Phases 4\u20137: wrapped in try/finally for temp directory cleanup.\n // cleanupTempDirectory(null) is a no-op, so this is safe even on early returns\n // before tempConversionRoot is set.\n let spinner2: UnifiedSpinner | undefined;\n try {\n // Phase 4: Process conflicts (may prompt \u2014 no spinner active)\n const shouldProceed = await processConflictsPhase(ctx);\n if (!shouldProceed) {\n return createCancellationResult(ctx);\n }\n\n // Spinner 2: covers phases 5\u20136 (file installation, manifest update).\n spinner2 = out.spinner();\n spinner2.start(`Installing ${ctx.source.packageName}`);\n\n // Phase 5: Execute installation (always)\n const installResult = await executeInstallationPhase(ctx);\n\n // Check for complete failure\n if (installResult.hadErrors && !installResult.installedAnyFiles) {\n spinner2.stop();\n return {\n success: false,\n error: `Failed to install ${ctx.source.packageName}: ${ctx.errors.join('; ')}`\n };\n }\n\n // Phase 6: Update manifest (skip for apply)\n if (shouldUpdateManifest(ctx)) {\n await updateManifestPhase(ctx);\n }\n\n spinner2.stop(`Installed ${ctx.source.packageName}`);\n\n // Phase 7: Report results (always)\n return await reportResultsPhase(ctx, installResult);\n\n } catch (error) {\n spinner2?.stop();\n\n 
logger.debug(`Pipeline failed for ${ctx.source.packageName}:`, error);\n\n const errorMessage = error instanceof Error ? error.message : String(error);\n out.error(`Failed to install ${ctx.source.packageName}: ${errorMessage}`);\n\n return {\n success: false,\n error: errorMessage,\n warnings: ctx.warnings.length > 0 ? ctx.warnings : undefined\n };\n } finally {\n await cleanupTempDirectory(tempConversionRoot);\n }\n}\n\n/**\n * Create result for user cancellation\n */\nfunction createCancellationResult(ctx: InstallationContext): CommandResult {\n const out = resolveOutput(ctx.execution);\n out.info('Installation cancelled by user');\n \n return {\n success: true,\n data: {\n packageName: ctx.source.packageName,\n installed: 0,\n skipped: 1,\n totalPackages: 0\n }\n };\n}\n\n/**\n * Create result when install is skipped because a covering package already exists.\n */\nfunction createAlreadyCoveredResult(ctx: InstallationContext): CommandResult {\n return {\n success: true,\n data: {\n packageName: ctx.source.packageName,\n installed: 0,\n skipped: 1,\n reason: 'Already installed via broader package'\n }\n };\n}\n\n/**\n * Version validation phase for mutable sources.\n *\n * Checks if the source version satisfies the consumer's manifest range.\n * Uses the shared resolveVersionMismatch cascade (force/interactive/error).\n */\nasync function versionValidationPhase(ctx: InstallationContext): Promise<void> {\n const sourceVersion = ctx.source.version;\n if (!sourceVersion) return;\n\n const manifestRange = await readManifestRangeForDependency(\n ctx.targetDir, ctx.source.packageName,\n );\n const check = checkVersionConstraint(sourceVersion, manifestRange);\n\n if (check.status !== 'mismatch') return;\n\n const prompt = resolvePrompt(ctx.execution);\n const resolution = await resolveVersionMismatch(\n ctx.source.packageName, check, { force: ctx.options?.force }, prompt, 'install',\n );\n\n if (resolution.action === 'skip') {\n throw new Error(`Installation cancelled: 
version mismatch for ${ctx.source.packageName}`);\n }\n\n await updateManifestRange(ctx.targetDir, ctx.source.packageName, sourceVersion, resolution.newRange);\n}\n", "import type { CommandResult } from '../../../types/index.js';\nimport type { InstallationContext } from './context.js';\nimport type { InstallReportData } from '../install-reporting.js';\nimport { displayInstallationResults } from '../install-reporting.js';\nimport { runUnifiedInstallPipeline } from './pipeline.js';\nimport { resolveOutput } from '../../ports/resolve.js';\nimport { checkSubsumption } from '../orchestrator/subsumption-resolver.js';\nimport type { OutputPort } from '../../ports/output.js';\n\nexport interface MultiContextPipelineOptions {\n /** When true, suppress per-context reports and emit one grouped report at the end */\n groupReport?: boolean;\n /** Package name for the grouped report (uses first context's packageName when omitted) */\n groupReportPackageName?: string;\n /** When true, stop processing remaining contexts on the first failure */\n failFast?: boolean;\n}\n\nexport async function runMultiContextPipeline(\n contexts: InstallationContext[],\n options?: MultiContextPipelineOptions\n): Promise<CommandResult> {\n if (contexts.length === 0) {\n return { success: true, data: { installed: 0, skipped: 0, results: [] } };\n }\n\n const { groupReport, groupReportPackageName, failFast } = options ?? 
{};\n const out = resolveOutput(contexts[0].execution);\n\n // Pre-filter: remove contexts already covered by a broader installed package.\n // Mark surviving contexts so the pipeline phase does not re-check.\n const { active: activeContexts, skippedCount: preSkipped } =\n await filterSubsumedContexts(contexts, out);\n\n if (activeContexts.length === 0) {\n return {\n success: true,\n data: {\n installed: 0,\n skipped: preSkipped,\n results: [],\n reason: 'All resources already installed via full package'\n }\n };\n }\n\n let installed = 0;\n let skipped = preSkipped;\n let failed = 0;\n const results: Array<{ name: string; success: boolean; error?: string }> = [];\n const reportDataList: InstallReportData[] = [];\n\n for (const ctx of activeContexts) {\n if (groupReport) {\n ctx._deferredReport = true;\n }\n\n const result = await runUnifiedInstallPipeline(ctx);\n const name = (result.data as any)?.packageName || ctx.source.packageName || 'unknown';\n\n if (result.success) {\n installed += (result.data as any)?.installed ?? 0;\n skipped += (result.data as any)?.skipped ?? 0;\n const reportData = (result as any)._reportData;\n if (reportData) {\n reportDataList.push(reportData);\n }\n } else {\n failed += 1;\n if (failFast) {\n // Record remaining contexts as skipped\n results.push({\n name,\n success: false,\n error: result.error\n });\n break;\n }\n }\n\n results.push({\n name,\n success: result.success,\n error: result.success ? undefined : result.error\n });\n }\n\n if (groupReport && reportDataList.length > 0) {\n const merged = mergeInstallReportData(reportDataList, {\n packageName: groupReportPackageName ?? contexts[0].source.packageName,\n suppressHeader: true\n });\n const output = resolveOutput(contexts[0].execution);\n displayInstallationResults(merged, output);\n }\n\n const success = failed === 0;\n return {\n success,\n data: {\n installed,\n skipped,\n results\n },\n error: success ? undefined : `${failed} resource${failed === 1 ? 
'' : 's'} failed to install`\n };\n}\n\n/**\n * Filter out contexts whose resources are already covered by a broader\n * installed package (subsumption \"already-covered\" check).\n *\n * Contexts that survive filtering have `_subsumptionChecked` set to true\n * so the pipeline's own subsumption phase does not re-check them.\n * Exported for testability.\n */\nexport async function filterSubsumedContexts(\n contexts: InstallationContext[],\n out: OutputPort\n): Promise<{ active: InstallationContext[]; skippedCount: number }> {\n const active: InstallationContext[] = [];\n let skippedCount = 0;\n\n for (const ctx of contexts) {\n if (ctx.options?.force) {\n ctx._subsumptionChecked = true;\n active.push(ctx);\n continue;\n }\n\n const result = await checkSubsumption(ctx.source, ctx.targetDir);\n if (result.type === 'already-covered') {\n const resourcePath = ctx.source.resourcePath ||\n ctx.source.packageName.replace(/^.*?\\/[^/]+\\/[^/]+\\//, '');\n out.info(`Skipped: ${resourcePath} is already installed via ${result.coveringPackage}`);\n skippedCount++;\n } else {\n ctx._subsumptionChecked = true;\n active.push(ctx);\n }\n }\n\n return { active, skippedCount };\n}\n\nexport function mergeInstallReportData(\n list: InstallReportData[],\n overrides: { packageName?: string; suppressHeader?: boolean }\n): InstallReportData {\n const first = list[0];\n const installedFiles = list.flatMap(r => r.installedFiles ?? []);\n const updatedFiles = list.flatMap(r => r.updatedFiles ?? []);\n\n const rootInstalled = list.flatMap(r => r.rootFileResults?.installed ?? []);\n const rootUpdated = list.flatMap(r => r.rootFileResults?.updated ?? []);\n const rootSkipped = list.flatMap(r => r.rootFileResults?.skipped ?? []);\n\n const errorCount = list.reduce((sum, r) => sum + (r.errorCount ?? 0), 0);\n const errors = list.flatMap(r => r.errors ?? []);\n const namespacedFiles = list.flatMap(r => r.namespacedFiles ?? []);\n const relocatedFiles = list.flatMap(r => r.relocatedFiles ?? 
[]);\n const claimedFiles = list.flatMap(r => r.claimedFiles ?? []);\n const replacedResources = list.flatMap(r => r.replacedResources ?? []);\n\n return {\n ...first,\n packageName: overrides.packageName ?? first.packageName,\n installedFiles,\n updatedFiles,\n rootFileResults: {\n installed: rootInstalled,\n updated: rootUpdated,\n skipped: rootSkipped\n },\n errorCount,\n errors,\n namespaced: namespacedFiles.length > 0 || undefined,\n namespacedFiles: namespacedFiles.length > 0 ? namespacedFiles : undefined,\n relocatedFiles,\n claimedFiles: claimedFiles.length > 0 ? claimedFiles : undefined,\n replacedResources: replacedResources.length > 0 ? replacedResources : undefined,\n resolvedPackages: first.resolvedPackages,\n suppressHeader: overrides.suppressHeader\n };\n}\n", "import { join } from 'path';\nimport { exists } from '../../../utils/fs.js';\nimport type { ConvenienceFilterOptions, ResourceInstallationSpec } from '../convenience-matchers.js';\nimport { applyConvenienceFilters, displayFilterErrors } from '../convenience-matchers.js';\nimport { buildResourceInstallContexts, prepareResourceContextsForMultiInstall } from '../unified/context-builders.js';\nimport { logger } from '../../../utils/logger.js';\nimport type { InstallationContext } from '../unified/context.js';\nimport type { LoadedPackage } from '../sources/base.js';\n\n/**\n * Find the base path where agents/skills/rules/commands subdirs live.\n * Registry packages may have these at contentRoot or under platform dirs (.opencode, .cursor, etc.).\n */\nexport async function findBaseForConvenienceFilters(\n contentRoot: string,\n options: ConvenienceFilterOptions\n): Promise<string> {\n const candidates = [contentRoot];\n for (const sub of ['.opencode', '.cursor', '.claude', '.claude-plugin']) {\n candidates.push(join(contentRoot, sub));\n }\n for (const base of candidates) {\n if (options.agents?.length && (await exists(join(base, 'agents')))) return base;\n if (options.skills?.length && (await 
exists(join(base, 'skills')))) return base;\n if (options.rules?.length && (await exists(join(base, 'rules')))) return base;\n if (options.commands?.length && (await exists(join(base, 'commands')))) return base;\n }\n return contentRoot;\n}\n\n/**\n * Resolve convenience resources (agents/skills) to install.\n *\n * This centralizes user-facing error display and \"continue with partial matches\"\n * behavior so strategies/handlers don't duplicate it.\n *\n * @throws Error if none of the requested resources can be found.\n */\nexport async function resolveConvenienceResources(\n basePath: string,\n repoRoot: string,\n options: ConvenienceFilterOptions\n): Promise<ResourceInstallationSpec[]> {\n const filterResult = await applyConvenienceFilters(basePath, repoRoot, options);\n\n if (filterResult.errors.length > 0) {\n displayFilterErrors(filterResult.errors);\n\n if (filterResult.resources.length === 0) {\n throw new Error('None of the requested resources were found');\n }\n\n logger.debug(`Continuing with ${filterResult.resources.length} resource(s)`);\n }\n\n return filterResult.resources;\n}\n\n/** Options for base path resolution */\nexport interface ConvenienceFilterInstallOpts {\n /** Use context.detectedBase as base (e.g. when loader already ran base detection). Otherwise uses findBaseForConvenienceFilters. 
*/\n useDetectedBase?: boolean;\n}\n\n/**\n * Run the full convenience filter flow: resolve resources, build contexts, prepare for pipeline.\n * Shared by registry, path, and git strategies.\n *\n * @param context - Installation context (must have source populated)\n * @param loaded - Loaded package from loader\n * @param options - Convenience filter options (agents, skills, rules, commands)\n * @param opts - Base path resolution: useDetectedBase=true for git (has base detection)\n * @returns Resource contexts ready for createMultiResourceResult\n */\nexport async function runConvenienceFilterInstall(\n context: InstallationContext,\n loaded: LoadedPackage,\n options: ConvenienceFilterOptions,\n opts?: ConvenienceFilterInstallOpts\n): Promise<InstallationContext[]> {\n const contentRoot = loaded.contentRoot;\n const repoRoot = loaded.sourceMetadata?.repoPath ?? contentRoot;\n\n const basePath = opts?.useDetectedBase && context.detectedBase\n ? context.detectedBase\n : await findBaseForConvenienceFilters(contentRoot, options);\n\n const resources = await resolveConvenienceResources(basePath, repoRoot, options);\n const resourceContexts = buildResourceInstallContexts(context, resources, repoRoot);\n return prepareResourceContextsForMultiInstall(resourceContexts, repoRoot);\n}\n", "/**\n * Convenience matchers for --agents, --skills, --rules, and --commands filtering.\n * \n * Matches resource names against frontmatter fields and file/directory names\n * with deepest match resolution for ambiguous cases.\n */\n\nimport { join, basename, dirname, relative, resolve, sep } from 'path';\nimport { walkFiles } from '../../utils/file-walker.js';\nimport { exists, readTextFile } from '../../utils/fs.js';\nimport { splitFrontmatter } from '../markdown-frontmatter.js';\nimport { logger } from '../../utils/logger.js';\nimport type { OutputPort } from '../ports/output.js';\nimport { resolveOutput } from '../ports/resolve.js';\nimport { isMarkerFile, getMarkerFilename } from 
'../resources/resource-registry.js';\nimport type { ResourceType } from './resource-types.js';\nimport { findMarkerResourceFiles } from './resource-search.js';\n\n/**\n * Result of a resource match\n */\nexport interface ResourceMatchResult {\n /** Resource name that was searched for */\n name: string;\n \n /** Whether the resource was found */\n found: boolean;\n \n /** Path to the matched resource (if found) */\n path?: string;\n \n /** For skills, the directory to install (parent of SKILL.md) */\n installDir?: string;\n \n /** How the resource was matched */\n matchedBy?: 'frontmatter' | 'filename' | 'dirname';\n \n /** Version extracted from frontmatter (if present) */\n version?: string;\n \n /** Error message (if not found) */\n error?: string;\n}\n\n/**\n * Container for all filtering results\n */\nexport interface ResourceInstallationSpec {\n /** Resource name that was requested */\n name: string;\n\n /** Resource type */\n resourceType: ResourceType;\n\n /** Path to resource relative to repo root */\n resourcePath: string;\n\n /** Base path where resource was discovered */\n basePath: string;\n\n /** Resource kind for scoping */\n resourceKind: 'file' | 'directory' | 'plugin';\n\n /** How the resource was matched */\n matchedBy: 'frontmatter' | 'filename' | 'dirname';\n\n /** Version extracted from resource frontmatter (if present) */\n resourceVersion?: string;\n}\n\nexport interface ConvenienceMatcherResult {\n /** Matched resources to install */\n resources: ResourceInstallationSpec[];\n \n /** Errors for resources that weren't found */\n errors: string[];\n}\n\n/**\n * Options for convenience filtering\n */\nexport interface ConvenienceFilterOptions {\n /** Agent names to match */\n agents?: string[];\n \n /** Skill names to match */\n skills?: string[];\n \n /** Rule names to match */\n rules?: string[];\n \n /** Command names to match */\n commands?: string[];\n \n /** Plugin scope filter (marketplace context) */\n pluginScope?: string[];\n}\n\n/**\n 
* Apply convenience filters (--agents, --skills) to a resource.\n * \n * @param basePath - Base path to search from\n * @param options - Filter options\n * @returns Filter results with matched resources and errors\n */\nexport async function applyConvenienceFilters(\n basePath: string,\n repoRoot: string,\n options: ConvenienceFilterOptions\n): Promise<ConvenienceMatcherResult> {\n const resources: ResourceInstallationSpec[] = [];\n const errors: string[] = [];\n const baseRoot = resolve(basePath);\n const repoRootResolved = resolve(repoRoot);\n const scopeRoots = options.pluginScope?.map(scope => resolve(repoRootResolved, scope)) ?? [];\n\n const isInScope = (absPath: string): boolean => {\n if (scopeRoots.length === 0) {\n return true;\n }\n return scopeRoots.some(scopeRoot => {\n if (absPath === scopeRoot) {\n return true;\n }\n return absPath.startsWith(`${scopeRoot}${sep}`);\n });\n };\n\n const toRepoRelative = (absPath: string): string => {\n const rel = relative(repoRootResolved, absPath);\n return rel.replace(/\\\\/g, '/').replace(/^\\.\\/?/, '');\n };\n\n // Match agents\n if (options.agents && options.agents.length > 0) {\n const agentResults = await matchMarkdownResources(basePath, 'agents', 'Agent', options.agents);\n\n for (const result of agentResults) {\n if (result.found && result.path) {\n const absPath = resolve(result.path);\n if (!isInScope(absPath)) {\n errors.push(`Agent '${result.name}' not found in selected plugin scope`);\n continue;\n }\n const resourcePath = toRepoRelative(absPath);\n resources.push({\n name: result.name,\n resourceType: 'agent',\n resourcePath,\n basePath: baseRoot,\n resourceKind: 'file',\n matchedBy: (result.matchedBy || 'filename') as 'frontmatter' | 'filename' | 'dirname',\n resourceVersion: result.version\n });\n } else if (result.error) {\n errors.push(result.error);\n }\n }\n }\n\n // Match skills\n if (options.skills && options.skills.length > 0) {\n const skillResults = await matchSkills(basePath, 
options.skills);\n\n for (const result of skillResults) {\n if (result.found && result.path && result.installDir) {\n const absDir = resolve(result.installDir);\n if (!isInScope(absDir)) {\n errors.push(`Skill '${result.name}' not found in selected plugin scope`);\n continue;\n }\n const resourcePath = toRepoRelative(absDir);\n resources.push({\n name: result.name,\n resourceType: 'skill',\n resourcePath,\n basePath: baseRoot,\n resourceKind: 'directory',\n matchedBy: (result.matchedBy || 'dirname') as 'frontmatter' | 'filename' | 'dirname',\n resourceVersion: result.version\n });\n } else if (result.error) {\n errors.push(result.error);\n }\n }\n }\n\n // Match rules\n if (options.rules && options.rules.length > 0) {\n const ruleResults = await matchMarkdownResources(basePath, 'rules', 'Rule', options.rules);\n\n for (const result of ruleResults) {\n if (result.found && result.path) {\n const absPath = resolve(result.path);\n if (!isInScope(absPath)) {\n errors.push(`Rule '${result.name}' not found in selected plugin scope`);\n continue;\n }\n const resourcePath = toRepoRelative(absPath);\n resources.push({\n name: result.name,\n resourceType: 'rule',\n resourcePath,\n basePath: baseRoot,\n resourceKind: 'file',\n matchedBy: (result.matchedBy || 'filename') as 'frontmatter' | 'filename' | 'dirname',\n resourceVersion: result.version\n });\n } else if (result.error) {\n errors.push(result.error);\n }\n }\n }\n\n // Match commands\n if (options.commands && options.commands.length > 0) {\n const commandResults = await matchMarkdownResources(basePath, 'commands', 'Command', options.commands);\n\n for (const result of commandResults) {\n if (result.found && result.path) {\n const absPath = resolve(result.path);\n if (!isInScope(absPath)) {\n errors.push(`Command '${result.name}' not found in selected plugin scope`);\n continue;\n }\n const resourcePath = toRepoRelative(absPath);\n resources.push({\n name: result.name,\n resourceType: 'command',\n resourcePath,\n 
basePath: baseRoot,\n resourceKind: 'file',\n matchedBy: (result.matchedBy || 'filename') as 'frontmatter' | 'filename' | 'dirname',\n resourceVersion: result.version\n });\n } else if (result.error) {\n errors.push(result.error);\n }\n }\n }\n\n logger.info('Convenience filter results', {\n resourceCount: resources.length,\n errorCount: errors.length\n });\n\n return {\n resources,\n errors\n };\n}\n\n/**\n * Generic matcher for markdown file resources (agents, rules, commands).\n * Scans all .md files under basePath/subDir and matches by frontmatter name or filename.\n */\nasync function matchMarkdownResources(\n basePath: string,\n subDir: string,\n resourceLabel: string,\n requestedNames: string[]\n): Promise<ResourceMatchResult[]> {\n const results: ResourceMatchResult[] = [];\n \n const dir = join(basePath, subDir);\n const files: string[] = [];\n \n if (await exists(dir)) {\n for await (const file of walkFiles(dir)) {\n if (file.endsWith('.md')) {\n files.push(file);\n }\n }\n }\n\n for (const name of requestedNames) {\n const match = await findMarkdownResourceByName(files, name);\n \n if (match) {\n results.push({\n name,\n found: true,\n path: match.path,\n matchedBy: match.matchedBy,\n version: match.version\n });\n } else {\n results.push({\n name,\n found: false,\n error: `${resourceLabel} '${name}' not found`\n });\n }\n }\n\n return results;\n}\n\n/**\n * Find a markdown resource by name using frontmatter or filename.\n */\nasync function findMarkdownResourceByName(\n files: string[],\n name: string\n): Promise<{ path: string; matchedBy: 'frontmatter' | 'filename'; version?: string } | null> {\n // Priority 1: Frontmatter name match\n for (const file of files) {\n try {\n const content = await readTextFile(file);\n const { frontmatter } = splitFrontmatter(content);\n if (frontmatter?.name === name) {\n const version = extractVersionFromFrontmatter(frontmatter);\n return { path: file, matchedBy: 'frontmatter', version };\n }\n } catch (error) {\n // 
Ignore frontmatter parsing errors\n }\n }\n\n // Priority 2: Filename match (without .md extension)\n const byFilename = files.filter(f => basename(f, '.md') === name);\n\n if (byFilename.length === 1) {\n const file = byFilename[0];\n const version = await extractVersionFromFile(file);\n return { path: file, matchedBy: 'filename', version };\n }\n\n if (byFilename.length > 1) {\n // Deepest match - most segments in path\n const deepest = byFilename.sort((a, b) =>\n b.split('/').length - a.split('/').length\n )[0];\n const version = await extractVersionFromFile(deepest);\n return { path: deepest, matchedBy: 'filename', version };\n }\n\n return null;\n}\n\n/**\n * Match skills by name using SKILL.md frontmatter and directory name matching.\n * \n * @param basePath - Base path to search from\n * @param requestedNames - Skill names to find\n * @returns Array of match results\n */\nasync function matchSkills(\n basePath: string,\n requestedNames: string[]\n): Promise<ResourceMatchResult[]> {\n const results: ResourceMatchResult[] = [];\n const skillFiles = await findMarkerResourceFiles(basePath, 'skill');\n\n for (const name of requestedNames) {\n const match = await findSkillByName(skillFiles, name);\n \n if (match) {\n results.push({\n name,\n found: true,\n path: match.path,\n installDir: dirname(match.path), // Install entire parent directory\n matchedBy: match.matchedBy,\n version: match.version\n });\n } else {\n results.push({\n name,\n found: false,\n error: `Skill '${name}' not found (requires ${getMarkerFilename('skill')})`\n });\n }\n }\n\n return results;\n}\n\n/**\n * Find a skill by name using SKILL.md frontmatter or directory name.\n * Extracts version from SKILL.md frontmatter when available.\n */\nasync function findSkillByName(\n skillFiles: string[],\n name: string\n): Promise<{ path: string; matchedBy: 'frontmatter' | 'dirname'; version?: string } | null> {\n // Priority 1: Frontmatter name match in SKILL.md\n for (const file of skillFiles) {\n try 
{\n const content = await readTextFile(file);\n const { frontmatter } = splitFrontmatter(content);\n if (frontmatter?.name === name) {\n const version = extractVersionFromFrontmatter(frontmatter);\n return { path: file, matchedBy: 'frontmatter', version };\n }\n } catch (error) {\n // Ignore frontmatter parsing errors\n }\n }\n\n // Priority 2: Directory name match (immediate parent of SKILL.md)\n for (const file of skillFiles) {\n const dirName = basename(dirname(file));\n if (dirName === name) {\n const version = await extractVersionFromFile(file);\n return { path: file, matchedBy: 'dirname', version };\n }\n }\n\n // Priority 3: Nested directory name match (any ancestor directory)\n const matchingByNested = skillFiles.filter(file => {\n const dirPath = dirname(file);\n const segments = dirPath.split('/');\n return segments.includes(name);\n });\n\n if (matchingByNested.length > 0) {\n // Deepest match - most segments in path\n const deepest = matchingByNested.sort((a, b) =>\n b.split('/').length - a.split('/').length\n )[0];\n const version = await extractVersionFromFile(deepest);\n return { path: deepest, matchedBy: 'dirname', version };\n }\n\n return null;\n}\n\n/**\n * Display filter errors to the user.\n * \n * @param errors - Array of error messages\n * @param available - Available resources (for suggestions)\n */\nexport function displayFilterErrors(errors: string[], output?: OutputPort): void {\n if (errors.length === 0) {\n return;\n }\n\n const out = output ?? 
resolveOutput();\n out.error('\\nThe following resources were not found:');\n for (const error of errors) {\n out.error(` - ${error}`);\n }\n}\n\n/**\n * Extract version from frontmatter object.\n * Returns trimmed version string or undefined if not present/invalid.\n * Supports both top-level 'version' and nested 'metadata.version'.\n */\nfunction extractVersionFromFrontmatter(frontmatter: any): string | undefined {\n if (!frontmatter || typeof frontmatter !== 'object') {\n return undefined;\n }\n\n // Priority 1: Top-level version\n // Priority 2: Nested metadata.version\n const version = frontmatter.version ?? frontmatter.metadata?.version;\n\n if (typeof version === 'string') {\n const trimmed = version.trim();\n return trimmed.length > 0 ? trimmed : undefined;\n }\n\n return undefined;\n}\n\n/**\n * Extract version from a markdown file by parsing its frontmatter.\n * Returns undefined if file cannot be read or has no version.\n */\nasync function extractVersionFromFile(filePath: string): Promise<string | undefined> {\n try {\n const content = await readTextFile(filePath);\n const { frontmatter } = splitFrontmatter(content);\n return extractVersionFromFrontmatter(frontmatter);\n } catch (error) {\n return undefined;\n }\n}\n", "import { basename, relative } from 'path';\nimport { walkFiles } from '../../utils/fs.js';\nimport { WORKSPACE_DISCOVERY_EXCLUDES } from '../../constants/workspace.js';\nimport { getResourceTypeDef } from '../resources/resource-registry.js';\nimport type { ResourceTypeId } from '../../types/resources.js';\n\nfunction hasAncestorDirNamed(basePath: string, filePath: string, dirName: string): boolean {\n const relativePath = relative(basePath, filePath).replace(/\\\\/g, '/');\n const segments = relativePath.split('/').filter(Boolean);\n return segments.slice(0, -1).includes(dirName);\n}\n\nexport async function findMarkerResourceFiles(\n basePath: string,\n resourceType: ResourceTypeId\n): Promise<string[]> {\n const definition = 
getResourceTypeDef(resourceType);\n if (!definition.dirName || !definition.marker) {\n return [];\n }\n\n const files: string[] = [];\n\n for await (const file of walkFiles(basePath, [], { excludeDirs: WORKSPACE_DISCOVERY_EXCLUDES })) {\n if (basename(file) !== definition.marker) {\n continue;\n }\n\n if (!hasAncestorDirNamed(basePath, file, definition.dirName)) {\n continue;\n }\n\n files.push(file);\n }\n\n return files;\n}\n", "/**\n * Resource Selection Menu\n * \n * Interactive menu for selecting specific resources to install\n */\n\nimport type { OutputPort } from '../ports/output.js';\nimport type { PromptPort } from '../ports/prompt.js';\nimport { resolveOutput, resolvePrompt } from '../ports/resolve.js';\nimport { logger } from '../../utils/logger.js';\nimport { UserCancellationError } from '../../utils/errors.js';\nimport { getInstallableTypes, toLabelPlural, RESOURCE_TYPE_ORDER } from '../resources/resource-registry.js';\nimport type { ResourceCatalog, ResourceEntry } from '../resources/resource-catalog.js';\nimport type { \n ResourceDiscoveryResult, \n DiscoveredResource,\n SelectedResource,\n ResourceType \n} from './resource-types.js';\n\n/**\n * Display interactive resource selection menu\n * \n * @param discovery - Resource discovery result\n * @param packageName - Package name for display\n * @param packageVersion - Package version for display\n * @returns Array of selected resources (empty if cancelled)\n */\nexport async function promptResourceSelection(\n discovery: ResourceDiscoveryResult,\n packageName: string,\n packageVersion?: string,\n output?: OutputPort,\n prompt?: PromptPort\n): Promise<SelectedResource[]> {\n const out = output ?? resolveOutput();\n const prm = prompt ?? 
resolvePrompt();\n \n logger.debug('Prompting resource selection', {\n package: packageName,\n version: packageVersion,\n total: discovery.total\n });\n \n // No resources found\n if (discovery.total === 0) {\n out.warn('No resources found in this package');\n return [];\n }\n \n // Build grouped options by resource type\n const groupedOptions = buildGroupedOptions(discovery);\n \n if (Object.keys(groupedOptions).length === 0) {\n out.warn('No installable resources found');\n return [];\n }\n \n try {\n // Original: clackGroupMultiselect(message, groupedOptions, { selectableGroups, groupSpacing })\n // PromptPort.groupMultiselect does not support selectableGroups/groupSpacing options;\n // adapters should implement selectable groups and spacing as appropriate for their backend.\n const groups: Record<string, Array<{ label: string; value: DiscoveredResource }>> = {};\n for (const [groupName, options] of Object.entries(groupedOptions)) {\n groups[groupName] = options.map(opt => ({\n label: opt.label,\n value: opt.value\n }));\n }\n \n const selectedResources = await prm.groupMultiselect<DiscoveredResource>(\n 'Select resources to install:',\n groups\n );\n \n if (!selectedResources || selectedResources.length === 0) {\n logger.info('User cancelled resource selection or selected nothing');\n return [];\n }\n \n // Map selected DiscoveredResource objects to SelectedResource objects\n const selected: SelectedResource[] = selectedResources.map(resource => ({\n resourceType: resource.resourceType,\n resourcePath: resource.resourcePath,\n displayName: resource.displayName,\n filePath: resource.filePath,\n installKind: resource.installKind,\n version: resource.version\n }));\n \n logger.info('User selected resources', {\n count: selected.length,\n types: Array.from(new Set(selected.map(r => r.resourceType)))\n });\n \n return selected;\n } catch (error) {\n if (error instanceof UserCancellationError) {\n logger.info('User cancelled resource selection');\n return [];\n }\n 
throw error;\n }\n}\n\n/**\n * Build grouped options for resource selection\n */\nfunction buildGroupedOptions(\n discovery: ResourceDiscoveryResult\n): Record<string, Array<{ value: DiscoveredResource; label: string; hint: string }>> {\n const groupedOptions: Record<string, Array<{ value: DiscoveredResource; label: string; hint: string }>> = {};\n \n // Resource type display order and labels\n const typeOrder = getInstallableTypes().map(def => ({\n type: def.id as ResourceType,\n label: def.labelPlural,\n }));\n \n for (const { type, label } of typeOrder) {\n const resources = discovery.byType.get(type);\n \n if (!resources || resources.length === 0) {\n continue;\n }\n \n // Create group name with count\n const groupName = `${label} (${resources.length})`;\n const groupOptions: Array<{ value: DiscoveredResource; label: string; hint: string }> = [];\n \n // Add resources to this group\n for (const resource of resources) {\n const versionSuffix = resource.version ? ` (v${resource.version})` : '';\n const pathHint = getPathHint(resource);\n \n // Build hint: description + path\n const fullDescription = resource.description \n ? 
`${resource.description} - ${pathHint}`\n : pathHint;\n const truncatedDescription = truncateToLines(fullDescription, 2);\n \n groupOptions.push({\n value: resource,\n label: `${resource.displayName}${versionSuffix}`,\n hint: truncatedDescription\n });\n }\n \n groupedOptions[groupName] = groupOptions;\n }\n \n return groupedOptions;\n}\n\n/**\n * Get path hint for display\n */\nfunction getPathHint(resource: DiscoveredResource): string {\n const path = resource.resourcePath;\n \n if (resource.installKind === 'directory') {\n return `${path}/`;\n }\n \n return path;\n}\n\n/**\n * Truncate text to a maximum number of lines\n * Approximates line breaks based on typical terminal width (~80 chars per line)\n */\nfunction truncateToLines(text: string, maxLines: number): string {\n const charsPerLine = 80;\n const maxChars = charsPerLine * maxLines;\n \n if (text.length <= maxChars) {\n return text;\n }\n \n // Truncate and add ellipsis\n return text.substring(0, maxChars - 3) + '...';\n}\n\nexport async function promptCatalogSelection(\n catalog: ResourceCatalog,\n header: { name: string; version?: string; action: string },\n output?: OutputPort,\n prompt?: PromptPort\n): Promise<ResourceEntry[]> {\n const out = output ?? resolveOutput();\n const prm = prompt ?? 
resolvePrompt();\n \n logger.debug('Prompting catalog selection', {\n action: header.action,\n total: catalog.total\n });\n \n if (catalog.total === 0) {\n out.warn('No resources found');\n return [];\n }\n \n const { choices, indexToEntry } = buildCatalogMenuChoices(catalog);\n \n if (choices.length === 0) {\n out.warn('No selectable resources found');\n return [];\n }\n \n try {\n // Original: smartMultiselect(message, choices, categoryMap, { hint, min })\n // smartMultiselect used a categoryMap to expand category selections into child indices.\n // PromptPort.multiselect returns selected values directly; category expansion\n // is the responsibility of the adapter implementation.\n const selectedIndices = await prm.multiselect<number>(\n `Select resources to ${header.action}:`,\n choices,\n {\n hint: '- Space: select/deselect \u2022 Enter: confirm \u2022 Categories expand to all items',\n min: 1\n }\n );\n \n if (!selectedIndices || selectedIndices.length === 0) {\n logger.info('User cancelled resource selection or selected nothing');\n return [];\n }\n \n const resourceIndices = selectedIndices.filter(idx => idx >= 0);\n const selected = resourceIndices\n .filter(idx => indexToEntry.has(idx))\n .map(idx => indexToEntry.get(idx)!);\n \n logger.info('User selected resources', {\n count: selected.length,\n types: Array.from(new Set(selected.map(r => r.resourceType)))\n });\n \n return selected;\n } catch (error) {\n if (error instanceof UserCancellationError) {\n logger.info('User cancelled resource selection');\n return [];\n }\n throw error;\n }\n}\n\nfunction buildCatalogMenuChoices(\n catalog: ResourceCatalog\n): { choices: Array<{ title: string; value: number; description?: string }>; indexToEntry: Map<number, ResourceEntry> } {\n const choices: Array<{ title: string; value: number; description?: string }> = [];\n const indexToEntry = new Map<number, ResourceEntry>();\n \n const typeOrder = RESOURCE_TYPE_ORDER;\n \n let globalIndex = 0;\n let categoryIndex = 
-1;\n \n for (const typeId of typeOrder) {\n const entries = catalog.byType.get(typeId);\n if (!entries || entries.length === 0) continue;\n \n const label = toLabelPlural(typeId);\n const currentCategoryIndex = categoryIndex;\n \n const boldLabel = `\\x1b[1m${label} (${entries.length}):\\x1b[0m`;\n choices.push({\n title: boldLabel,\n value: currentCategoryIndex,\n description: 'Select/deselect all items in this category'\n });\n \n for (const entry of entries) {\n const versionSuffix = entry.version ? ` (v${entry.version})` : '';\n const pathHint = getCatalogPathHint(entry);\n \n const fullDescription = entry.description\n ? `${entry.description} - ${pathHint}`\n : pathHint;\n const truncatedDescription = fullDescription.length > 160\n ? fullDescription.substring(0, 157) + '...'\n : fullDescription;\n \n indexToEntry.set(globalIndex, entry);\n \n choices.push({\n title: ` ${entry.name}${versionSuffix}`,\n value: globalIndex++,\n description: truncatedDescription\n });\n }\n \n categoryIndex--;\n }\n \n return { choices, indexToEntry };\n}\n\nfunction getCatalogPathHint(entry: ResourceEntry): string {\n if (entry.origin === 'installed') {\n if (entry.files.length === 1 && entry.files[0].target) {\n return entry.files[0].target;\n }\n return `(${entry.files.length} file${entry.files.length === 1 ? '' : 's'})`;\n }\n \n if (entry.resourcePath) {\n return entry.installKind === 'directory' ? `${entry.resourcePath}/` : entry.resourcePath;\n }\n \n return '';\n}\n\nexport function displayCatalogSelectionSummary(selected: ResourceEntry[], action: string, output?: OutputPort): void {\n if (selected.length === 0) return;\n \n const out = output ?? resolveOutput();\n \n const byType = new Map<string, number>();\n for (const entry of selected) {\n byType.set(entry.resourceType, (byType.get(entry.resourceType) || 0) + 1);\n }\n \n const lines: string[] = [];\n lines.push(`\\n\u2713 Selected ${selected.length} resource${selected.length === 1 ? 
'' : 's'} to ${action}:`);\n \n for (const [type, count] of byType.entries()) {\n const label = toLabelPlural(type as any);\n lines.push(` \u2022 ${count} ${label.toLowerCase()}`);\n }\n \n lines.push('');\n out.info(lines.join('\\n'));\n}\n"],
|
|
5
|
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAYA,SAAS,QAAAA,QAAM,YAAAC,YAAU,WAAAC,WAAS,YAAAC,YAAU,WAAAC,gBAAe;;;ACL3D,SAAS,YAAY,UAAU;AAC/B,SAAS,YAAY;AA2CrB,gBAAuBC,WACrB,KACA,UAAuB,CAAC,GACA;AACxB,MAAM;AAAA,IACJ;AAAA,IACA,iBAAiB;AAAA,IACjB,WAAW;AAAA,IACX,cAAc;AAAA,EAChB,IAAI;AAEJ,SAAO,kBAAkB,KAAK,QAAQ,gBAAgB,UAAU,aAAa,CAAC;AAChF;AAKA,gBAAgB,kBACd,KACA,QACA,gBACA,UACA,aACA,cACwB;AAExB,MAAI,iBAAe;AAInB,QAAI;AACF,UAAM,UAAU,MAAM,GAAG,QAAQ,KAAK,EAAE,eAAe,GAAK,CAAC;AAE7D,eAAW,SAAS,SAAS;AAC3B,YAAM,WAAW,KAAK,KAAK,MAAM,IAAI,GAGjCC,eAAc,MAAM,YAAY,GAChC,SAAS,MAAM,OAAO;AAE1B,YAAI,MAAM,eAAe,KAAK;AAC5B,cAAI;AACF,gBAAMC,QAAO,MAAM,GAAG,KAAK,QAAQ;AACnC,YAAAD,eAAcC,MAAK,YAAY,GAC/B,SAASA,MAAK,OAAO;AAAA,UACvB,QAAQ;AAEN;AAAA,UACF;AAAA,iBACS,MAAM,eAAe;AAE9B;AAIF,QAAI,UAEE,CADkB,MAAM,OAAO,UAAUD,YAAW,MAOtDA,gBAAe,gBACjB,MAAM,WAIJA,eACF,OAAO;AAAA,UACL;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA,eAAe;AAAA,QACjB,IACS,WAET,MAAM;AAAA,MAEV;AAAA,IACF,SAAS,OAAO;AAEd,UAAK,MAAgC,SAAS,YACzC,MAAgC,SAAS;AAC5C,cAAM;AAAA,IAEV;AACF;;;ACpIO,SAAS,gCAAgC,SAA2C;AACzF,MAAM,EAAE,YAAY,IAAI,iBAAiB,OAAO;AAEhD,SAAI,CAAC,eAAe,OAAO,eAAgB,WAClC,CAAC,IAGH;AAAA,IACL,MAAM,OAAO,YAAY,QAAS,WAAW,YAAY,OAAO;AAAA,IAChE,aAAa,OAAO,YAAY,eAAgB,WAAW,YAAY,cAAc;AAAA,IACrF,SAAS,8BAA8B,WAAW;AAAA,EACpD;AACF;AAEA,SAAS,8BAA8B,aAAsC;AAC3E,MAAI,CAAC,eAAe,OAAO,eAAgB;AACzC;AAGF,MAAM,UAAU,YAAY,WAAW,YAAY,UAAU;AAE7D,MAAI,OAAO,WAAY,UAAU;AAC/B,QAAM,UAAU,QAAQ,KAAK;AAC7B,WAAO,QAAQ,SAAS,IAAI,UAAU;AAAA,EACxC;AAGF;;;ACnCA,SAAS,gBAAgB;AAElB,SAAS,eAAe,UAA0B;AACvD,SAAO,SAAS,QAAQ,YAAY,EAAE,KAAK;AAC7C;AAEO,SAAS,oBAAoB,UAA0B;AAC5D,SAAO,eAAe,SAAS,QAAQ,CAAC;AAC1C;AAEO,SAAS,wBAAwB,SAAyB;AAC/D,SAAO,SAAS,OAAO;AACzB;AAEO,SAAS,sBACd,iBACA,cACQ;AACR,SAAO,mBAAmB,gBAAgB,KAAK,EAAE,SAAS,IACtD,kBACA;AACN;;;ACrBA,SAAS,QAAAE,cAAY;AACrB,SAAS,YAAYC,WAAU;;;ACD/B,SAAS,QAAAC,QAAM,YAAAC,YAAU,YAAAC,YAAU,WAAAC,gBAAe;;;ACAlD,SAAS,YAAAC,WAAU
,QAAAC,OAAM,YAAAC,iBAAgB;;;ACAzC,SAAS,SAAS,kBAAkB;;;ACwC7B,SAAS,gBAAgB,OAA+B;AAC7D,MAAI,CAAC,SAAS,OAAO,SAAU;AAC7B,WAAO;AAIT,MAAM,SAAS,kBAAkB,KAAK;AACtC,MAAI;AACF,WAAO;AAIT,MAAM,cAAc,qBAAqB,KAAK;AAC9C,MAAI;AACF,WAAO;AAIT,MAAM,QAAQ,eAAe,KAAK;AAClC,MAAI;AACF,WAAO;AAIT,MAAM,aAAa,mBAAmB,KAAK;AAC3C,SAAI,cAIG;AACT;AAYO,SAAS,qBAAqB,OAA+B;AAClE,MAAI,CAAC,MAAM,WAAW,KAAK;AACzB,WAAO;AAGT,MAAM,YAAY,MAAM,MAAM,CAAC;AAE/B,MAAI,CAAC;AACH,UAAM,IAAI;AAAA,MACR;AAAA;AAAA;AAAA;AAAA;AAAA,IAIF;AAGF,MAAM,WAAW,UAAU,MAAM,GAAG,EAAE,OAAO,OAAK,EAAE,SAAS,CAAC;AAE9D,MAAI,SAAS,SAAS;AACpB,UAAM,IAAI;AAAA,MACR,6BAA6B,KAAK;AAAA;AAAA;AAAA;AAAA;AAAA,IAIpC;AAGF,MAAM,QAAQ,SAAS,CAAC,GAClB,OAAO,SAAS,CAAC,GAEjB,MAAM,mBAAmB,OAAO,IAAI,GACpCC,QAAO,SAAS,SAAS,IAAI,SAAS,MAAM,CAAC,EAAE,KAAK,GAAG,IAAI;AAEjE,gBAAO,MAAM,2BAA2B,EAAE,OAAO,OAAO,MAAM,MAAAA,OAAM,IAAI,CAAC,GAElE;AAAA,IACL;AAAA,IACA,KAAK;AAAA;AAAA,IACL,MAAAA;AAAA,EACF;AACF;AAcO,SAAS,eAAe,OAA+B;AAC5D,MAAI;AAEJ,MAAI;AACF,UAAM,IAAI,IAAI,KAAK;AAAA,EACrB,QAAQ;AACN,WAAO;AAAA,EACT;AAGA,MAAI,IAAI,aAAa;AACnB,WAAO;AAGT,MAAM,WAAW,IAAI,SAAS,MAAM,GAAG,EAAE,OAAO,OAAK,EAAE,SAAS,CAAC;AAEjE,MAAI,SAAS,SAAS;AACpB,UAAM,IAAI;AAAA,MACR;AAAA;AAAA,OACQ,KAAK;AAAA,IACf;AAGF,MAAM,QAAQ,SAAS,CAAC,GACpB,OAAO,SAAS,CAAC;AAGrB,EAAI,KAAK,SAAS,MAAM,MACtB,OAAO,KAAK,MAAM,GAAG,EAAE;AAGzB,MAAM,gBAAgB,mBAAmB,OAAO,IAAI,GAChD,KACAA;AAGJ,MAAI,SAAS,SAAS,GAAG;AACvB,QAAM,WAAW,SAAS,CAAC;AAE3B,QAAI,aAAa;AACf,YAAM,IAAI;AAAA,QACR;AAAA;AAAA;AAAA,IAEK,KAAK;AAAA;AAAA;AAAA,0CAE4B,KAAK,IAAI,IAAI;AAAA,2CACZ,KAAK,IAAI,IAAI;AAAA,4CACZ,KAAK,IAAI,IAAI;AAAA,yBAChC,KAAK,IAAI,IAAI;AAAA,MACpC;AAGF,QAAI,aAAa,QAAQ;AACvB,UAAI,SAAS,SAAS;AACpB,cAAM,IAAI;AAAA,UACR;AAAA;AAAA,OACQ,KAAK;AAAA;AAAA,+BACmB,KAAK,IAAI,IAAI;AAAA,QAC/C;AAGF,YAAM,mBAAmB,SAAS,CAAC,CAAC,GAGhC,SAAS,SAAS,MACpBA,QAAO,SAAS,MAAM,CAAC,EAAE,IAAI,OAAK,mBAAmB,CAAC,CAAC,EAAE,KAAK,GAAG;AAAA,IAErE;AAAA,EACF;AAEA,gBAAO,MAAM,qBAAqB,EAAE,OAAO,OAAO,MAAM,KAAK,MAAAA,OAAM,KAAK,cAAc,CAAC,GAEhF;AAAA,IACL,KAAK;AAAA,IACL;AAAA,IACA,MAAAA;AAAA,EACF;AACF;AAgBO,SAAS,mBAAmB,OAA+B;AACh
E,MAAI,CAAC,SAAS,KAAK;AACjB,WAAO;AAIT,MAAM,CAAC,SAAS,QAAQ,IAAI,MAAM,MAAM,KAAK,CAAC,GAExC,SAAkB;AAAA,IACtB,KAAK;AAAA,EACP;AAGA,MAAI,UAAU;AACZ,QAAM,EAAE,KAAK,MAAAA,MAAK,IAAI,kBAAkB,UAAU,KAAK;AACvD,IAAI,QAAK,OAAO,MAAM,MAClBA,UAAM,OAAO,OAAOA;AAAA,EAC1B;AAEA,gBAAO,MAAM,0BAA0B,EAAE,OAAO,GAAG,OAAO,CAAC,GAEpD;AACT;AAYA,SAAS,kBAAkB,OAA+B;AAExD,MAAI,MAAM,WAAW,SAAS,GAAG;AAC/B,WAAO,KAAK,+EAAqE;AAEjF,QAAM,YAAY,MAAM,MAAM,CAAC,GACzB,CAAC,UAAU,QAAQ,IAAI,UAAU,MAAM,KAAK,CAAC,GAC7C,CAAC,OAAO,IAAI,IAAI,SAAS,MAAM,GAAG;AAExC,QAAI,CAAC,SAAS,CAAC;AACb,YAAM,IAAI;AAAA,QACR,wBAAwB,KAAK;AAAA,MAC/B;AAIF,QAAM,SAAkB,EAAE,KADd,mBAAmB,OAAO,IAAI,EACZ;AAE9B,QAAI,UAAU;AACZ,UAAM,EAAE,KAAK,MAAAA,MAAK,IAAI,kBAAkB,UAAU,KAAK;AACvD,MAAI,QAAK,OAAO,MAAM,MAClBA,UAAM,OAAO,OAAOA;AAAA,IAC1B;AAEA,WAAO;AAAA,EACT;AAGA,MAAI,MAAM,WAAW,MAAM,GAAG;AAC5B,WAAO,KAAK,sEAA4D;AAExE,QAAM,YAAY,MAAM,MAAM,CAAC,GACzB,CAAC,KAAK,QAAQ,IAAI,UAAU,MAAM,KAAK,CAAC;AAE9C,QAAI,CAAC;AACH,YAAM,IAAI;AAAA,QACR,qBAAqB,KAAK;AAAA,MAC5B;AAGF,QAAM,SAAkB,EAAE,IAAI;AAE9B,QAAI,UAAU;AACZ,UAAM,EAAE,KAAK,MAAAA,MAAK,IAAI,kBAAkB,UAAU,KAAK;AACvD,MAAI,QAAK,OAAO,MAAM,MAClBA,UAAM,OAAO,OAAOA;AAAA,IAC1B;AAEA,WAAO;AAAA,EACT;AAEA,SAAO;AACT;AAgBA,SAAS,kBACP,UACA,WACiC;AACjC,MAAM,SAA0C,CAAC,GAG3C,QAAQ,SAAS,MAAM,GAAG;AAEhC,WAAW,QAAQ;AACjB,QAAK;AAEL,UAAI,KAAK,SAAS,GAAG,GAAG;AAEtB,YAAM,UAAU,KAAK,QAAQ,GAAG,GAC1B,MAAM,KAAK,MAAM,GAAG,OAAO,GAC3B,QAAQ,KAAK,MAAM,UAAU,CAAC;AAEpC,YAAI,QAAQ,UAAU,QAAQ;AAC5B,iBAAO,OAAO;AAAA;AAEd,gBAAM,IAAI;AAAA,YACR,2BAA2B,QAAQ;AAAA;AAAA,qBACb,GAAG;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,UAK3B;AAAA,MAEJ,OAAO;AAEL,YAAI,OAAO;AACT,gBAAM,IAAI;AAAA,YACR;AAAA;AAAA,QACS,QAAQ;AAAA;AAAA;AAAA,UAEnB;AAEF,eAAO,MAAM;AAAA,MACf;AAGF,SAAO;AACT;AASO,SAAS,SAAS,OAAwB;AAE/C,MAAM,UAAU,MAAM,MAAM,KAAK,CAAC,EAAE,CAAC;AACrC,SACE,MAAM,WAAW,UAAU,KAC3B,MAAM,WAAW,SAAS,KAC1B,MAAM,WAAW,QAAQ,KACzB,MAAM,WAAW,QAAQ,KACzB,MAAM,WAAW,MAAM,KACvB,QAAQ,SAAS,MAAM;AAE3B;AASO,SAAS,mBAAmB,OAAe,MAAsB;AAEtE,MAAM,YAAY,KAAK,SAAS,MAAM,IAAI,KAAK,MAAM,GAAG,EAAE,IAAI;AAC9D,SAAO,sBAAsB,KAAK,IAAI,SAAS;AACjD;;;AD
pWA,eAAsB,qBACpB,KACA,MAAc,QAAQ,IAAI,GACW;AAErC,MAAM,UAAU,gBAAgB,GAAG;AACnC,MAAI;AACF,WAAO;AAAA,MACL,MAAM;AAAA,MACN,QAAQ,QAAQ;AAAA,MAChB,QAAQ,QAAQ;AAAA,MAChB,SAAS,QAAQ;AAAA,IACnB;AAIF,MAAM,gBAAgB,IAAI,SAAS,cAAc,SAAS,KAAK,IAAI,SAAS,cAAc,YAAY,GAGhG,gBAAgB,IAAI,WAAW,GAAG,KAClB,IAAI,WAAW,IAAI,KACnB,IAAI,WAAW,KAAK,KACpB,QAAQ,OACR,IAAI,WAAW,IAAI,KACnB,IAAI,WAAW,aAAa,cAAc,GAAG;AAAA,EAC5C,WAAW,GAAG,KAAK,CAAC,IAAI,SAAS,GAAG;AAE3D,MAAI,iBAAiB,eAAe;AAClC,QAAM,eAAe,WAAW,GAAG,IAAI,MAAM,QAAQ,KAAK,GAAG;AAE7D,QAAI,iBACE,MAAM,OAAO,YAAY;AAC3B,aAAO,EAAE,MAAM,WAAW,aAAa;AAO3C,QAAM,UAAU,MAAM,wBAAwB,YAAY,GACpD,kBAAkB,MAAM,iBAAiB,YAAY;AAE3D,QAAI,WAAW,gBAAgB;AAC7B,aAAO,EAAE,MAAM,aAAa,aAAa;AAI3C,QAAI,MAAM,OAAO,YAAY;AAC3B,YAAM,IAAI;AAAA,QACR,SAAS,GAAG,wGACmB,cAAc,eAAe,OAAO,oBAAoB,eAAe;AAAA,MACxG;AAAA,EAEJ;AAIA,MAAI,CAAC,iBAAiB,CAAC,eAAe;AACpC,QAAM,aAAa,MAAM,qBAAqB;AAAA,MAC5C;AAAA,MACA,aAAa;AAAA,MACb,UAAU;AAAA;AAAA,MACV,iBAAiB;AAAA;AAAA,MACjB,cAAc;AAAA;AAAA,MACd,gBAAgB;AAAA;AAAA,IAClB,CAAC;AAED,QAAI,WAAW,SAAS,WAAW;AACjC,oBAAO,KAAK,6CAA6C;AAAA,QACvD,aAAa;AAAA,QACb,MAAM,WAAW;AAAA,QACjB,YAAY,WAAW;AAAA,MACzB,CAAC,GAEM;AAAA,QACL,MAAM;AAAA,QACN,cAAc,WAAW;AAAA,QACzB,sBAAsB,WAAW;AAAA,MACnC;AAAA,EAEJ;AAGA,MAAI;AACF,QAAM,EAAE,MAAM,SAAS,aAAa,IAAI,wBAAwB,GAAG;AACnE,WAAO,EAAE,MAAM,YAAY,MAAM,SAAS,aAAa;AAAA,EACzD,QAAgB;AAEd,WAAO,EAAE,MAAM,YAAY,MAAM,IAAI;AAAA,EACvC;AACF;;;AE9IA,SAAS,YAAAC,WAAU,YAAAC,iBAAgB;AACnC,OAAOC,aAAY;;;ACDnB,YAAY,YAAY;AAoBjB,SAAS,kBAAkB,SAA+B;AAC/D,MAAI,CAAC,WAAW,QAAQ,KAAK,MAAM;AACjC,UAAM,IAAI,MAAM,yBAAyB;AAG3C,MAAM,UAAU,QAAQ,KAAK;AAG7B,MAAI,YAAY,OAAO,YAAY;AACjC,WAAO;AAAA,MACL,MAAM;AAAA,MACN,aAAa;AAAA,MACb,OAAO;AAAA,MACP,UAAU;AAAA,IACZ;AAIF,MAAI,QAAQ,WAAW,GAAG,GAAG;AAC3B,QAAM,cAAc,QAAQ,UAAU,CAAC;AACvC,QAAI,CAAQ,aAAM,WAAW;AAC3B,YAAM,IAAI,MAAM,yCAAyC,WAAW,EAAE;AAExE,WAAO;AAAA,MACL,MAAM;AAAA,MACN;AAAA,MACA,OAAO;AAAA,MACP,UAAU;AAAA,IACZ;AAAA,EACF;AAGA,MAAI,QAAQ,WAAW,GAAG,GAAG;AAC3B,QAAM,cAAc,QAAQ,UAAU,CAAC;AACvC,QAAI,CAAQ,aAAM,WAAW;AAC3B,YAAM,IAAI,MAAM,yCAAyC,WAAW,EAAE;AAExE,WAAO;AAAA,MACL,
MAAM;AAAA,MACN;AAAA,MACA,OAAO;AAAA,MACP,UAAU;AAAA,IACZ;AAAA,EACF;AAGA,MAAI,QAAQ,MAAM,UAAU,GAAG;AAC7B,QAAI,CAAQ,kBAAW,OAAO;AAC5B,YAAM,IAAI,MAAM,6BAA6B,OAAO,EAAE;AAIxD,WAAO;AAAA,MACL,MAAM;AAAA,MACN,aAHyB,kBAAW,OAAO,GAAG,WAAW;AAAA,MAIzD,OAAO;AAAA,MACP,UAAU;AAAA,IACZ;AAAA,EACF;AAGA,MAAW,aAAM,OAAO;AACtB,WAAO;AAAA,MACL,MAAM;AAAA,MACN,aAAa;AAAA,MACb,OAAO;AAAA,MACP,UAAU;AAAA,IACZ;AAGF,QAAM,IAAI,MAAM,2BAA2B,OAAO,EAAE;AACtD;AAKO,SAAS,iBAAiB,SAAiB,OAAwB;AACxE,MAAI;AAEF,WAAc,iBAAU,SAAS,OAAO,EAAE,mBAAmB,GAAK,CAAC;AAAA,EACrE,QAAgB;AACd,WAAO;AAAA,EACT;AACF;AAKO,SAAS,gBAAgB,mBAA6B,OAA8B;AACzF,MAAI;AAEF,QAAM,iBAAiB,kBACpB,OAAO,OAAY,aAAM,CAAC,CAAC,EAC3B,KAAK,CAAC,GAAG,MAAa,eAAQ,GAAG,CAAC,CAAC;AAGtC,WAAc,qBAAc,gBAAgB,OAAO,EAAE,mBAAmB,GAAK,CAAC;AAAA,EAChF,QAAgB;AACd,WAAO;AAAA,EACT;AACF;AAKO,SAAS,iBAAiB,mBAA4C;AAC3E,MAAM,gBAAgB,kBAAkB,OAAO,OAAY,aAAM,CAAC,CAAC;AACnE,SAAI,cAAc,WAAW,IAAU,OAEhC,cAAc,KAAK,CAAC,GAAG,MAAa,eAAQ,GAAG,CAAC,CAAC,EAAE,CAAC;AAC7D;AAKO,SAAS,iBAAiB,SAAyB;AACxD,MAAI,CAAQ,aAAM,OAAO;AACvB,UAAM,IAAI,MAAM,oCAAoC,OAAO,EAAE;AAE/D,SAAO,IAAI,OAAO;AACpB;AAeO,SAAS,eAAe,SAA0B;AACvD,MAAI;AAEF,WADe,kBAAkB,OAAO,EAC1B,SAAS;AAAA,EACzB,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AA2CO,SAAS,oBAAoB,SAAiB,mBAA4C;AAC/F,MAAI;AACF,QAAM,SAAS,kBAAkB,OAAO;AAExC,YAAQ,OAAO,MAAM;AAAA,MACnB,KAAK;AACH,eAAO,kBAAkB,SAAS,OAAO,WAAW,IAAI,OAAO,cAAc;AAAA,MAC/E,KAAK;AACH,eAAO,iBAAiB,iBAAiB;AAAA,MAC3C;AAEE,eAAO,gBAAgB,mBAAmB,OAAO,KAAK;AAAA,IAC1D;AAAA,EACF,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAKO,SAAS,oBAAoB,SAA0B;AAC5D,MAAM,SAAgB,aAAM,OAAO;AACnC,SAAO,GAAQ,UAAU,OAAO,WAAW,SAAS;AACtD;AA2DO,SAAS,4BAA4B,OAAwB;AAClE,MAAM,UAAU,MAAM,KAAK;AAU1B,MATG,CAAC,WAAW,YAAY,OAAO,QAAQ,YAAY,MAAM,YASxD,CAAC,QAAQ,SAAS,GAAG;AACvB,WAAO;AAGV,MAAI;AACF,QAAM,cAAc,IAAW,aAAM,SAAS,EAAE,mBAAmB,GAAK,CAAC;AACzE,aAAW,iBAAiB,YAAY;AACtC,eAAW,cAAc;AACvB,YAAI,WAAW,OAAO,WAAW,SAAS;AACxC,iBAAO;AAAA,EAIf,QAAQ;AACN,WAAO;AAAA,EACT;AAEA,SAAO;AACT;AAKO,SAAS,2BACd,mBACA,OACA,SACwB;AACxB,MAAM,cAAc,kBAAkB,KAAK,GACrC,UAAU,oBAAoB,iBAAiB,GAC/C,kBAAkB,iBAAiB,QAAQ,OAAO,aAAW,CAAC,oB
AAoB,OAAO,CAAC,CAAC,GAC3F,sBAAsB,iBAAiB,QAAQ,OAAO,aAAW,oBAAoB,OAAO,CAAC,CAAC,GAC9F,mBAA6B,CAAC,GAC9B,uBAAiC,CAAC,GAElC,SAAiC;AAAA,IACrC,SAAS;AAAA,IACT,cAAc;AAAA,IACd;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,QAAQ;AAAA,EACV,GAEM,SAAS,MACN;AAGT,MAAI,YAAY,SAAS,SAAS;AAChC,WAAO,SAAS;AAChB,QAAM,aAAa,QAAQ,KAAK,aAAkB,UAAG,SAAS,YAAY,WAAW,CAAC;AACtF,WAAI,eACE,oBAAoB,UAAU,KAChC,qBAAqB,KAAK,UAAU,GACpC,OAAO,eAAe,MAEtB,iBAAiB,KAAK,UAAU,GAElC,OAAO,UAAU,aAEZ,OAAO;AAAA,EAChB;AAEA,MAAM,kBAAkB,YAAY,SAAS,aAAa,MAAM,YAAY;AAC5E,mBAAiB;AAAA,IACf,GAAG,iBAAiB,iBAAiB,iBAAiB,EAAK;AAAA,EAC7D,GACA,qBAAqB;AAAA,IACnB,GAAG,iBAAiB,qBAAqB,iBAAiB,EAAI;AAAA,EAChE;AAGA,MAAM,gBAAgB,iBAAiB;AAAA,IACrC,GAAG;AAAA,IACH,GAAG;AAAA,EACL,CAAC;AAQD,MANI,YAAY,SAAS,aACvB,OAAO,SAAS,aAEhB,OAAO,SAAS,SAGd,cAAc,WAAW;AAC3B,WAAO,OAAO;AAGhB,MAAM,WAAW,cAAc,CAAC;AAChC,gBAAO,UAAU,UACjB,OAAO,eAAe,oBAAoB,QAAQ,GAC3C,OAAO;AAChB;AAEA,SAAS,oBAAoB,UAA8B;AACzD,MAAM,OAAO,oBAAI,IAAY;AAC7B,WAAW,WAAW;AACpB,IAAI,CAAC,WAAW,CAAQ,aAAM,OAAO,KAAK,KAAK,IAAI,OAAO,KAG1D,KAAK,IAAI,OAAO;AAElB,SAAO,MAAM,KAAK,IAAI;AACxB;AAEA,SAAS,iBAAiB,UAA8B;AACtD,SAAO,SAAS,MAAM,EAAE,KAAY,eAAQ;AAC9C;AAEA,SAAS,iBACP,UACA,OACA,mBACU;AACV,MAAI;AACF,WAAO;AAAA,MACL,SAAS,OAAO,aAAkB,iBAAU,SAAS,OAAO,EAAE,kBAAkB,CAAC,CAAC;AAAA,IACpF;AAAA,EACF,QAAQ;AACN,WAAO,CAAC;AAAA,EACV;AACF;;;ACnaA,YAAYC,aAAY;AAqBjB,SAAS,mBAAmB,SAAyB;AAC1D,MAAM,cAAc,QAAQ,QAAQ,GAAG,GACjC,YAAY,gBAAgB,KAAK,UAAU,QAAQ,MAAM,GAAG,WAAW,GAEvE,SAAgB,cAAM,SAAS;AACrC,SAAI,SACK,GAAG,OAAO,KAAK,IAAI,OAAO,KAAK,IAAI,OAAO,KAAK,KAEjD;AACT;;;AC/BA,YAAYC,aAAY;AACxB,OAAO,UAAU;AA2FV,SAAS,qBAAqB,SAAkC;AACrE,SAAgC,WAAY,QAAQ,YAAY;AAClE;AAKO,SAAS,mBAAmB,SAAiC;AAClE,SAAO,qBAAqB,OAAO,IAAI,cAAe;AACxD;;;ACrGA,SAAS,QAAAC,OAAM,YAAAC,WAAU,SAAS,YAAAC,iBAAgB;AAClD,SAAS,UAAAC,eAAc;;;ACDvB,SAAe,gBAAgB;AAI/B,SAAS,cAAc;;;ACkBhB,SAAS,wBAAwB,QAAiD;AACvF,MAAM,MAAM,oBAAI,KAAK,GAEf,iBAAiC;AAAA,IACrC,MAAM,OAAO;AAAA,IACb,UAAU,OAAO;AAAA,IACjB,YAAY;AAAA,IACZ,YAAY,OAAO;AAAA,EACrB,GAEM,gBAA6B;AAAA,IACjC,MAAM,OAAO;AAAA,IACb,UAAU,OAAO;AAAA,EACnB;AAEA,gBAAO,MAA
M,0CAA0C;AAAA,IACrD,MAAM,OAAO;AAAA,IACb,UAAU,OAAO;AAAA,IACjB,YAAY,OAAO;AAAA,EACrB,CAAC,GAEM;AAAA,IACL;AAAA,IACA;AAAA,IACA,mBAAmB,CAAC;AAAA,IACpB,gBAAgB;AAAA,EAClB;AACF;AAQO,SAAS,yBAAyB,KAAwC;AAE/E,MAAM,SAAS,IAAI,WAAW,oBAAoB,IAAI,KAAK;AAE3D,gBAAO,MAAM,iCAAiC;AAAA,IAC5C,MAAM,IAAI,SAAS;AAAA,IACnB,WAAW,CAAC,CAAC,IAAI;AAAA,IACjB,cAAc,OAAO;AAAA,IACrB,kBAAkB,OAAO;AAAA,EAC3B,CAAC,GAEM,wBAAwB,MAAM;AACvC;AAQO,SAAS,sBACd,UACA,aAAqB,GACK;AAG1B,MAAM,iBAAiC;AAAA,IACrC,MAAM;AAAA,IACN;AAAA,IACA,YALU,oBAAI,KAAK;AAAA,IAMnB;AAAA,EACF,GAEM,gBAA6B;AAAA,IACjC,MAAM;AAAA,IACN;AAAA,EACF;AAEA,gBAAO,MAAM,qCAAqC,EAAE,UAAU,WAAW,CAAC,GAEnE;AAAA,IACL;AAAA,IACA;AAAA,IACA,mBAAmB,CAAC;AAAA,IACpB,gBAAgB;AAAA,EAClB;AACF;AAwCO,SAAS,mBACd,SACA,gBAC0B;AAC1B,SAAO;AAAA,IACL,GAAG;AAAA,IACH;AAAA,EACF;AACF;AAQO,SAAS,6BACd,SACA,WACA,gBAC0B;AAC1B,MAAM,YAAY,oBAAI,KAAK,GAErB,aAAa;AAAA,IACjB,GAAG,QAAQ;AAAA,IACX;AAAA,MACE,MAAM,QAAQ;AAAA,MACd,IAAI;AAAA,MACJ;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAEA,gBAAO,MAAM,oCAAoC;AAAA,IAC/C,MAAM;AAAA,MACJ,MAAM,QAAQ,cAAc;AAAA,MAC5B,UAAU,QAAQ,cAAc;AAAA,IAClC;AAAA,IACA,IAAI;AAAA,MACF,MAAM,UAAU;AAAA,MAChB,UAAU,UAAU;AAAA,IACtB;AAAA,IACA;AAAA,IACA,kBAAkB,WAAW;AAAA,EAC/B,CAAC,GAEM;AAAA,IACL,GAAG;AAAA,IACH,eAAe;AAAA,IACf,mBAAmB;AAAA,IACnB;AAAA,EACF;AACF;;;AC/KO,SAAS,iBACd,SACA,SAC6B;AAC7B,MAAM,iBAAiB,SAAS,kBAAkB;AAElD,SAAO;AAAA,IACL,gBAAgB;AAAA,MACd,MAAM,QAAQ,eAAe;AAAA,MAC7B,UAAU,QAAQ,eAAe;AAAA,MACjC,YAAY,QAAQ,eAAe,WAAW,YAAY;AAAA,MAC1D,YAAY,QAAQ,eAAe;AAAA,IACrC;AAAA,IACA,eAAe;AAAA,MACb,MAAM,QAAQ,cAAc;AAAA,MAC5B,UAAU,QAAQ,cAAc;AAAA,IAClC;AAAA,IACA,mBAAmB,iBAAiB,QAAQ,kBAAkB,IAAI,aAAW;AAAA,MAC3E,MAAM;AAAA,QACJ,MAAM,OAAO,KAAK;AAAA,QAClB,UAAU,OAAO,KAAK;AAAA,MACxB;AAAA,MACA,IAAI;AAAA,QACF,MAAM,OAAO,GAAG;AAAA,QAChB,UAAU,OAAO,GAAG;AAAA,MACtB;AAAA,MACA,gBAAgB,OAAO;AAAA,MACvB,WAAW,OAAO,UAAU,YAAY;AAAA,IAC1C,EAAE,IAAI,CAAC;AAAA,IACP,gBAAgB,QAAQ;AAAA,EAC1B;AACF;AAqDO,SAAS,cACd,SACA,SACQ;AACR,MAAM,aAAa,iBAAiB,SAAS,OAAO,GAC9C,SAAS,SAAS,UAAU;AAElC,SAAO,KAAK,UAAU,YAAY,MAAM,SAAS,IAAI,CAAC;AACxD;;;ACzDA,IAAM,oBAA
oB;AAAA,EACxB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,GAKM,qBAA+C;AAAA,EACnD,WAAW;AAAA,EACX,kBAAkB;AAAA,EAClB,WAAW;AAAA,EACX,aAAa;AAAA,EACb,UAAU;AAAA,EACV,YAAY;AAAA,EACZ,aAAa;AAAA,EACb,SAAS;AAAA,EACT,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,SAAS;AAAA,EACT,aAAa;AAAA,EACb,YAAY;AAAA,EACZ,UAAU;AACZ;AAKO,SAAS,oBAAoB,OAAqC;AAMvE,MAJgC,MAAM;AAAA,IAAK,OACzC,EAAE,SAAS;AAAA,EACb;AAGE,WAAO;AAAA,MACL,MAAM;AAAA,MACN,UAAU;AAAA,MACV,YAAY;AAAA,MACZ,UAAU;AAAA,QACR,gBAAgB;AAAA,QAChB,uBAAuB,MAAM;AAAA,QAC7B,mBAAmB,oBAAI,IAAI,CAAC,CAAC,iBAAiB,MAAM,MAAM,CAAC,CAAC;AAAA,QAC5D,YAAY,MAAM;AAAA,QAClB,aAAa;AAAA,UACX,WAAW,CAAC;AAAA,UACZ,kBAAkB,CAAC,4BAA4B;AAAA,QACjD;AAAA,MACF;AAAA,IACF;AAGF,MAAM,WAA2B;AAAA,IAC/B,gBAAgB;AAAA,IAChB,uBAAuB;AAAA,IACvB,mBAAmB,oBAAI,IAAI;AAAA,IAC3B,YAAY,MAAM;AAAA,IAClB,aAAa;AAAA,MACX,WAAW,CAAC;AAAA,MACZ,kBAAkB,CAAC;AAAA,IACrB;AAAA,EACF;AAGA,WAAW,QAAQ,OAAO;AACxB,QAAM,iBAAiB,aAAa,KAAK,IAAI;AAE7C,QAAI,eAAe,SAAS;AAC1B,eAAS,kBACL,SAAS,YAAY,UAAU,SAAS,KAC1C,SAAS,YAAY,UAAU,KAAK,KAAK,IAAI;AAAA,aAEtC,eAAe,SAAS,uBAAuB,eAAe,UAAU;AACjF,eAAS;AACT,UAAM,QAAQ,SAAS,kBAAkB,IAAI,eAAe,QAAQ,KAAK;AACzE,eAAS,kBAAkB,IAAI,eAAe,UAAU,QAAQ,CAAC,GAE7D,SAAS,YAAY,iBAAiB,SAAS,KACjD,SAAS,YAAY,iBAAiB,KAAK,KAAK,IAAI;AAAA,IAExD;AAAA,EACF;AAGA,SAAO,gBAAgB,QAAQ;AACjC;AAKA,SAAS,aAAaC,OAGpB;AAEA,MAAM,YADQA,MAAK,MAAM,GAAG,EACJ,CAAC;AAGzB,MAAI,aAAa;AACf,WAAO;AAAA,MACL,MAAM;AAAA,MACN,UAAU,mBAAmB,SAAS;AAAA,IACxC;AAKF,MAAM,iBAAiB,8BAA8BA,KAAI;AACzD,SAAI,iBACK;AAAA,IACL,MAAM;AAAA,IACN,UAAU;AAAA,EACZ,IAIE,kBAAkB,SAAS,SAAS,IAC/B,EAAE,MAAM,YAAY,IAItB,EAAE,MAAM,QAAQ;AACzB;AAKA,SAAS,8BAA8BA,OAA+B;AACpE,MAAM,QAAQA,MAAK,MAAM,GAAG,GAEtB,YADW,MAAM,MAAM,SAAS,CAAC,EACZ,MAAM,GAAG;AAGpC,MAAI,UAAU,UAAU,GAAG;AACzB,QAAM,mBAAmB,UAAU,UAAU,SAAS,CAAC;AACvD,QAAI,aAAa,gBAAgB;AAC/B,aAAO;AAAA,EAEX;AAEA,SAAO;AACT;AAKA,SAAS,gBAAgB,UAAyC;AAChE,MAAM,EAAE,gBAAgB,uBAAuB,mBAAmB,WAAW,IAAI;AAGjF,MAAI,eAAe;AACjB,WAAO;AAAA,MACL,MAAM;AAAA,MACN,YAAY;AAAA,MACZ;AAAA,IACF;AAIF,MAAM,iBAAiB,iBAAiB,YAClC,gBAAgB,wBAAwB;AAG9C,MAAI,iBAAiB;AACnB,WAAO;AAAA,MACL,MAAM;
AAAA,MACN,YAAY;AAAA,MACZ;AAAA,IACF;AAIF,MAAI,gBAAgB,KAAK;AAEvB,QAAI,kBACA,WAAW;AAEf,aAAW,CAAC,UAAU,KAAK,KAAK;AAC9B,MAAI,QAAQ,aACV,WAAW,OACX,mBAAmB;AAIvB,QAAI;AACF,aAAO;AAAA,QACL,MAAM;AAAA,QACN,UAAU;AAAA,QACV,YAAY;AAAA,QACZ;AAAA,MACF;AAAA,EAEJ;AAGA,SAAO;AAAA,IACL,MAAM;AAAA,IACN,YAAY,KAAK,IAAI,gBAAgB,GAAG;AAAA,IACxC;AAAA,EACF;AACF;AAKO,SAAS,mBAAmB,QAAgC;AACjE,SAAO,OAAO,SAAS,uBAAuB,OAAO,aAAa;AACpE;AAKO,SAAS,gBACd,QACA,gBACS;AAET,SAAI,OAAO,SAAS,cACX,KAIL,OAAO,SAAS,uBAAuB,OAAO,WACzC,OAAO,aAAa,iBAGtB;AACT;AAQO,SAAS,+BAA+B,OAG7C;AACA,MAAM,SAAS,oBAAoB,KAAK,GAClC,UAAU,wBAAwB,MAAM;AAE9C,SAAO,EAAE,QAAQ,QAAQ;AAC3B;AAsBA,eAAsB,4BACpB,OACA,WACgC;AAEhC,MAAM,EAAE,uBAAuB,+BAA+B,uBAAuB,IACnF,MAAM,OAAO,uCAA8B,GACvC,EAAE,kBAAkB,IAAI,MAAM,OAAO,oCAA2B,GAChE;AAAA,IACJ;AAAA,IACA;AAAA,IACA,wBAAwB;AAAA,IACxB;AAAA,EACF,IAAI,MAAM,OAAO,4CAAmC,GAG9C,UAAU,sBAAsB,OAAO,SAAS;AAGtD,MAAI,uBAAuB,OAAO,GAAG;AACnC,QAAM,kBAAkB,8BAA8B,OAAO;AAE7D,WAAO;AAAA,MACL,eAAe;AAAA,MACf,iBAAiB;AAAA,MACjB,YAAY;AAAA;AAAA;AAAA;AAAA,MAIZ,cAAc,oBAAI,IAAI,CAAC,CAAC,iBAAiB,MAAM,IAAI,OAAK,EAAE,IAAI,CAAC,CAAC,CAAC;AAAA,MACjE,SAAS;AAAA,QACP,iBAAiB,QAAQ,QAAQ,IAAI,QAAM;AAAA,UACzC,YAAY,EAAE;AAAA,UACd,SAAS,EAAE;AAAA,QACb,EAAE;AAAA,QACF,mBAAmB,QAAQ;AAAA,QAC3B,eAAe,QAAQ;AAAA,MACzB;AAAA,MACA,UAAU;AAAA,QACR,YAAY,MAAM;AAAA,QAClB,eAAe;AAAA;AAAA,QACf,cAAc,MAAM;AAAA,QACpB,oBAAoB,oBAAI,IAAI,CAAC,CAAC,iBAAiB,MAAM,MAAM,CAAC,CAAC;AAAA,MAC/D;AAAA,IACF;AAAA,EACF;AAIA,OAAK,QAAQ,qBAAqB,QAAQ,kBAAkB,QAAQ,QAAQ,WAAW;AACrF,WAAO;AAAA,MACL,eAAe;AAAA,MACf,iBAAiB;AAAA,MACjB,YAAY;AAAA,MACZ,cAAc,oBAAI,IAAI,CAAC,CAAC,aAAa,MAAM,IAAI,OAAK,EAAE,IAAI,CAAC,CAAC,CAAC;AAAA,MAC7D,SAAS;AAAA,QACP,iBAAiB,CAAC;AAAA,QAClB,mBAAmB,QAAQ;AAAA,QAC3B,eAAe,QAAQ;AAAA,MACzB;AAAA,MACA,UAAU;AAAA,QACR,YAAY,MAAM;AAAA,QAClB,eAAe;AAAA,QACf,cAAc,MAAM;AAAA,QACpB,oBAAoB,oBAAI,IAAI,CAAC,CAAC,aAAa,MAAM,MAAM,CAAC,CAAC;AAAA,MAC3D;AAAA,IACF;AAKF,MAAM,cAAc,kBAAkB,OAAO,SAAS,GAGhD,eAAe,0BAA0B,WAAW,GACpD,aAAa,2BAA2B,cAAc,WAAW,GACjE,gBAAgB,0BAA0B,YAAY,GACtD,eAAe,qBAAqB,WAAW,GAG/C,gBAAgB,YAAY,MAC5B,eA
Ae,MAAM,SAAS;AAEpC,SAAO;AAAA,IACL;AAAA,IACA,iBAAiB;AAAA,IACjB;AAAA,IACA;AAAA,IACA;AAAA,IACA,SAAS,QAAQ,QAAQ,SAAS,IAAI;AAAA,MACpC,iBAAiB,QAAQ,QAAQ,IAAI,QAAM;AAAA,QACzC,YAAY,EAAE;AAAA,QACd,SAAS,EAAE;AAAA,MACb,EAAE;AAAA,MACF,mBAAmB,QAAQ;AAAA,MAC3B,eAAe,QAAQ;AAAA,IACzB,IAAI;AAAA,IACJ,UAAU;AAAA,MACR,YAAY,MAAM;AAAA,MAClB;AAAA,MACA;AAAA,MACA,oBAAoB,aAAa;AAAA,IACnC;AAAA,EACF;AACF;;;AC3aA,SAAS,QAAAC,aAAY;AAoErB,eAAsB,sBACpB,WACA,kBACiC;AACjC,MAAM,eAAeC,MAAK,WAAW,oBAAoB,eAAe,GAClE,gBAAgB,MAAM,OAAO,YAAY,GACzC,gBAAgB,kBAAkB,WAAW;AAGnD,MAAI,eAAe;AACjB,QAAI;AAEJ,QAAI;AACF,UAAM,UAAU,MAAM,aAAa,YAAY;AAC/C,uBAAiB,KAAK,MAAM,OAAO;AAAA,IACrC,SAAS,OAAO;AACd,YAAM,IAAI;AAAA,QACR,sCAAsC,YAAY,KAAK,KAAK;AAAA,MAC9D;AAAA,IACF;AAGA,WAAI,iBACF,OAAO;AAAA,MACL,cAAc,SAAS;AAAA,IAEzB,GAEO;AAAA,MACL,UAAU;AAAA,MACV,QAAQ;AAAA,IACV,KAIE,mBAGK;AAAA,MACL,UAHa,oBAAoB,gBAAgB,gBAAgB;AAAA,MAIjE,QAAQ;AAAA,IACV,IAIK;AAAA,MACL,UAAU;AAAA,MACV,QAAQ;AAAA,IACV;AAAA,EACF;AAGA,MAAI,iBAAiB;AAGnB,WAAO;AAAA,MACL,UAHe,2BAA2B,gBAAgB;AAAA,MAI1D,QAAQ;AAAA,IACV;AAIF,QAAI,mBACI,IAAI;AAAA,IACR,WAAW,iBAAiB,IAAI,SAAS,SAAS;AAAA,EAEpD,IAEM,IAAI;AAAA,IACR,cAAc,SAAS,+BAA+B,oBAAoB,eAAe;AAAA,EAC3F;AAEJ;AAMA,SAAS,oBACP,gBACA,kBACsB;AACtB,SAAO;AAAA,IACL,MAAM,eAAe;AAAA;AAAA,IACrB,SAAS,eAAe,WAAW,iBAAiB;AAAA,IACpD,aAAa,eAAe,eAAe,iBAAiB;AAAA,IAC5D,QAAQ,eAAe,UAAU,iBAAiB;AAAA,IAClD,UAAU,eAAe,YAAY,iBAAiB;AAAA,IACtD,YAAY,eAAe,cAAc,iBAAiB;AAAA,IAC1D,SAAS,eAAe,WAAW,iBAAiB;AAAA,IACpD,UAAU,eAAe,YAAY,iBAAiB;AAAA,IACtD,UAAU,eAAe,YAAY,iBAAiB;AAAA,IACtD,QAAQ,eAAe,UAAU,iBAAiB;AAAA,IAClD,OAAO,eAAe,SAAS,iBAAiB;AAAA,IAChD,YAAY,eAAe,cAAc,iBAAiB;AAAA,IAC1D,YAAY,eAAe,cAAc,iBAAiB;AAAA,EAC5D;AACF;AAKA,SAAS,2BAA2B,OAAqD;AACvF,SAAO;AAAA,IACL,MAAM,MAAM;AAAA,IACZ,SAAS,MAAM;AAAA,IACf,aAAa,MAAM;AAAA,IACnB,QAAQ,MAAM;AAAA,IACd,UAAU,MAAM;AAAA,IAChB,YAAY,MAAM;AAAA,IAClB,SAAS,MAAM;AAAA,IACf,UAAU,MAAM;AAAA,IAChB,UAAU,MAAM;AAAA,IAChB,QAAQ,MAAM;AAAA,IACd,OAAO,MAAM;AAAA,IACb,YAAY,MAAM;AAAA,IAClB,YAAY,MAAM;AAAA,EACpB;AACF;;;AJxKA,IAAM,yBAAyB,oBAAI,IAAgC;AAKnE,SAAS,uBAAuB,KAAc,SAAq
B;AACjE,MAAM,MAAM,GAAG,IAAI,SAAS,IAAI,IAAI,IAAI,SAAS,OAAO,IAClD,SAA6B,UAC/B,EAAE,SAAS,KAAK,QAAQ,IACxB,EAAE,SAAS,KAAK,SAAS,sBAAsB,iBAAiB,CAAG,EAAE;AACzE,yBAAuB,IAAI,KAAK,MAAM;AACxC;AAKO,SAAS,qBAAqB,MAAc,SAAiD;AAClG,MAAM,MAAM,GAAG,IAAI,IAAI,OAAO;AAC9B,SAAO,uBAAuB,IAAI,GAAG;AACvC;AAuBA,eAAsB,yBACpB,WACA,SAC6B;AAG7B,MAAM,kBADW,MAAM,sBAAsB,WAAW,SAAS,gBAAgB,GACjD,UAa1B,WAAuB;AAAA,IAC3B,MAVkB,0BAA0B;AAAA,MAC5C,QAAQ,SAAS;AAAA,MACjB,MAAM,SAAS;AAAA,MACf,cAAc,SAAS;AAAA,MACvB,aAAa,eAAe;AAAA,MAC5B,UAAU,SAAS;AAAA,IACrB,CAAC;AAAA;AAAA;AAAA;AAAA,IAQC,SAAS,eAAe,SAAS,KAAK,KAAK;AAAA,IAC3C,aAAa,eAAe;AAAA,IAC5B,UAAU,eAAe;AAAA,IACzB,SAAS,eAAe;AAAA,IACxB,UAAU,eAAe;AAAA,EAC3B;AAGA,EAAI,eAAe,QAAQ,SACzB,SAAS,SAAS,eAAe,OAAO,OAItC,eAAe,eACb,OAAO,eAAe,cAAe,WACvC,SAAS,aAAa;AAAA,IACpB,MAAM;AAAA,IACN,KAAK,eAAe;AAAA,EACtB,IACS,eAAe,WAAW,QACnC,SAAS,aAAa;AAAA,IACpB,MAAM,eAAe,WAAW,QAAQ;AAAA,IACxC,KAAK,eAAe,WAAW;AAAA,EACjC;AAKJ,MAAM,QAAQ,MAAM,mBAAmB,SAAS,GAK1C,SAAS,oBAAoB,KAAK,GAElC,MAAe;AAAA,IACnB;AAAA,IACA;AAAA;AAAA;AAAA;AAAA,IAIA,SAAS;AAAA,MACP,GAAG;AAAA,MACH,MAAM;AAAA,MACN,UAAU;AAAA,IACZ;AAAA,EACF,GAGM,oBAAoB,sBAAsB,iBAAiB,OAAO,UAAU;AAGlF,gCAAuB,KAAK,iBAAiB,GAE7C,OAAO,KAAK,kCAAkC;AAAA,IAC5C,MAAM,SAAS;AAAA,IACf,SAAS,SAAS;AAAA,IAClB,WAAW,MAAM;AAAA,IACjB,QAAQ,OAAO;AAAA,IACf,UAAU,OAAO;AAAA,IACjB,YAAY,OAAO;AAAA,EACrB,CAAC,GAEM,EAAE,SAAS,KAAK,SAAS,kBAAkB;AACpD;AAiBA,eAAe,mBAAmB,WAA2C;AAC3E,MAAM,QAAuB,CAAC;AAE9B,MAAI;AACF,mBAAiB,YAAY,UAAU,WAAW,CAAC,GAAG,EAAE,aAAa,sBAAsB,CAAC,GAAG;AAC7F,UAAM,eAAe,SAAS,WAAW,QAAQ;AAcjD,UAXkB,aAAa,MAAM,GAAG,EAC1B,KAAK,UAAQ,OAAO,IAAI,CAAC,KAKnC,aAAa,WAAW,OAAO,KAAK,iBAAiB,UAKrD,aAAa,WAAW,GAAG,aAAa,aAAa,GAAG;AAC1D;AAGF,UAAM,UAAU,MAAM,aAAa,QAAQ;AAE3C,YAAM,KAAK;AAAA,QACT,MAAM;AAAA,QACN;AAAA,QACA,UAAU;AAAA,MACZ,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,EAET,SAAS,OAAO;AACd,UAAM,IAAI;AAAA,MACR,iDAAiD,SAAS,KAAK,KAAK;AAAA,IACtE;AAAA,EACF;AACF;;;AK5MA,SAAS,YAAAC,WAAU,YAAAC,iBAAgB;AACnC,SAAS,YAAAC,iBAAgB;;;ACDzB,YAAY,SAAS;AACrB,SAAS,kBAAkB;AAC3B,SAAS,QAAQ,SAAS,MAAM,UAAU,iBAAiB;AAC3D,SAAS,QAAAC,aAAY;
AACrB,SAAS,cAAc;AAyBvB,eAAsB,yBAAyB,KAAoC;AACjF,SAAO,MAAM,iCAAiC,IAAI,SAAS,IAAI,IAAI,IAAI,SAAS,OAAO,EAAE;AAEzF,MAAM,UAAUC,MAAK,OAAO,GAAG,uBAAuB,KAAK,IAAI,CAAC,EAAE,GAC5D,cAAcA,MAAK,SAAS,gBAAgB;AAElD,MAAI;AAEF,UAAM,UAAU,OAAO;AAGvB,aAAW,QAAQ,IAAI,OAAO;AAC5B,UAAM,WAAWA,MAAK,SAAS,KAAK,IAAI;AACxC,YAAM,UAAUA,MAAK,UAAU,IAAI,CAAC,GACpC,MAAM,cAAc,UAAU,KAAK,SAAU,KAAK,YAA+B,MAAM;AAAA,IACzF;AAGA,UAAU;AAAA,MACR;AAAA,QACE,MAAM;AAAA,QACN,MAAM;AAAA,QACN,KAAK;AAAA,MACP;AAAA,MACA,IAAI,MAAM,IAAI,OAAK,EAAE,IAAI;AAAA,IAC3B;AAGA,QAAM,gBAAgB,MAAM,SAAS,WAAW,GAG1C,WAAW,WAAW,QAAQ,EAAE,OAAO,aAAa,EAAE,OAAO,KAAK;AAExE,kBAAO,MAAM,oBAAoB,cAAc,MAAM,qBAAqB,QAAQ,EAAE,GAE7E;AAAA,MACL,QAAQ;AAAA,MACR,MAAM,cAAc;AAAA,MACpB;AAAA,IACF;AAAA,EACF,SAAS,OAAO;AACd,iBAAO,MAAM,4BAA4B,EAAE,OAAO,aAAa,IAAI,SAAS,KAAK,CAAC,GAC5E,IAAI,gBAAgB,6BAA6B,KAAK,EAAE;AAAA,EAChE,UAAE;AAEA,QAAI;AACF,MAAI,MAAM,OAAO,WAAW,KAC1B,MAAM,OAAO,WAAW;AAAA,IAI5B,SAAS,cAAc;AACrB,aAAO,KAAK,iCAAiC,EAAE,aAAa,CAAC;AAAA,IAC/D;AAAA,EACF;AACF;AAKA,eAAsB,0BACpB,eACA,kBAC2B;AAC3B,SAAO,MAAM,oCAAoC,cAAc,MAAM,SAAS;AAE9E,MAAM,UAAUA,MAAK,OAAO,GAAG,uBAAuB,KAAK,IAAI,CAAC,EAAE,GAE5D,eAAe,CAAC,WAEb,OAAO,UAAU,KAAK,OAAO,CAAC,MAAM,MAAQ,OAAO,CAAC,MAAM,KAG7D,sBAAsB,CAAC,QAAgB,WAAmB,QAChD,OAAO,SAAS,GAAG,KAAK,IAAI,UAAU,OAAO,MAAM,CAAC,EAG/D,SAAS,MAAM,EACf,QAAQ,mDAAmD,QAAG,GAG7D,SAAS,aAAa,aAAa,GACnC,cAAcA,MAAK,SAAS,SAAS,mBAAmB,aAAa;AAE3E,MAAI;AAEF,QAAM,iBAAiB,WAAW,QAAQ,EAAE,OAAO,aAAa,EAAE,OAAO,KAAK;AAE9E,QAAI,oBAAoB,mBAAmB;AACzC,YAAM,IAAI;AAAA,QACR,wCAAwC,gBAAgB,UAAU,cAAc;AAAA,MAClF;AAIF,UAAM,UAAU,OAAO,GACvB,MAAM,UAAU,aAAa,aAAa,GAG1C,MAAU,YAAQ;AAAA,MAChB,MAAM;AAAA,MACN,KAAK;AAAA,IACP,CAAC;AAGD,QAAM,QAAuB,CAAC,GACxB,eAAe,OAAO,KAAa,WAAmB,OAAsB;AAChF,UAAM,UAAU,MAAM,QAAQ,GAAG;AAEjC,eAAW,SAAS,SAAS;AAC3B,YAAM,WAAWA,MAAK,KAAK,KAAK,GAC1B,eAAe,WAAWA,MAAK,UAAU,KAAK,IAAI,OAElD,QAAQ,MAAM,KAAK,QAAQ;AACjC,YAAI,MAAM,OAAO,GAAG;AAClB,cAAM,UAAU,MAAM,aAAa,QAAQ;AAC3C,gBAAM,KAAK;AAAA,YACT,MAAM;AAAA,YACN;AAAA,YACA,UAAU;AAAA,UACZ,CAAC;AAAA,QACH,MAAO,CAAI,MAAM,YAAY,KAC3B,MAAM,aAAa,UAAU,YAAY;A
AAA,MAE7C;AAAA,IACF;AAEA,UAAM,aAAa,OAAO;AAG1B,QAAM,gBAAgB,MAAM,OAAO,OAAK,EAAE,SAAS,oBAAoB,EAAE,SAAS,aAAa;AAE/F,kBAAO,MAAM,aAAa,cAAc,MAAM,qBAAqB,GAE5D;AAAA,MACL,OAAO;AAAA,MACP,UAAU;AAAA,IACZ;AAAA,EACF,SAAS,OAAO;AACd,QAAM,QAAQ,MAAM;AAElB,UAAI;AACF;AAEF,UAAM,UAAU,oBAAoB,aAAa,GAC3C,UAAU,QAAQ,UAAU;AAClC,aAAI,QAAQ,WAAW,GAAG,KAAK,QAAQ,WAAW,GAAG,KAAK,QAAQ,WAAW,GAAG,IACvE,kEAAkE,QAAQ,MAAM,GAAG,EAAE,CAAC,OAExF,mEAAmE,QAAQ,MAAM,GAAG,EAAE,CAAC;AAAA,IAChG,GAAG;AAEH,WAAO,MAAM,6BAA6B,EAAE,OAAO,QAAQ,KAAK,CAAC;AACjE,QAAM,cAAc,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AACzE,UAAM,IAAI,gBAAgB,OAAO,GAAG,WAAW,KAAK,IAAI,KAAK,8BAA8B,WAAW,EAAE;AAAA,EAC1G,UAAE;AAEA,QAAI;AACF,MAAI,MAAM,OAAO,WAAW,KAC1B,MAAM,OAAO,WAAW;AAAA,IAE5B,SAAS,cAAc;AACrB,aAAO,KAAK,iCAAiC,EAAE,aAAa,CAAC;AAAA,IAC/D;AAAA,EACF;AACF;AAKO,SAAS,wBACd,aACA,SACA,aACU;AACV,MAAM,WAAW,IAAI,SAAS;AAG9B,WAAS,OAAO,QAAQ,WAAW,GAC/B,WACF,SAAS,OAAO,WAAW,OAAO;AAIpC,MAAM,OAAO,IAAI,KAAK,CAAC,YAAY,MAAM,GAAG,EAAE,MAAM,mBAAmB,CAAC,GAClE,WAAW,UAAU,GAAG,WAAW,IAAI,OAAO,GAAG,cAAc,SAAS,KAAK,GAAG,WAAW,IAAI,WAAW,GAAG,cAAc,SAAS;AAC1I,kBAAS,OAAO,QAAQ,MAAM,QAAQ,GAE/B;AACT;AAKO,SAAS,uBACd,QACA,cACA,kBACS;AACT,MAAI;AAEF,QAAI,gBAAgB,OAAO,WAAW;AACpC,oBAAO,KAAK,yBAAyB;AAAA,QACnC,UAAU;AAAA,QACV,QAAQ,OAAO;AAAA,MACjB,CAAC,GACM;AAIT,QAAI,kBAAkB;AACpB,UAAM,iBAAiB,WAAW,QAAQ,EAAE,OAAO,MAAM,EAAE,OAAO,KAAK;AACvE,UAAI,mBAAmB;AACrB,sBAAO,KAAK,6BAA6B;AAAA,UACvC,UAAU;AAAA,UACV,QAAQ;AAAA,QACV,CAAC,GACM;AAAA,IAEX;AAEA,WAAO;AAAA,EACT,SAAS,OAAO;AACd,kBAAO,MAAM,kCAAkC,EAAE,MAAM,CAAC,GACjD;AAAA,EACT;AACF;;;ADzPA,SAAS,UAAAC,eAAc;AASvB,YAAYC,WAAU;AAmBf,SAAS,gBAAgBC,OAA8B;AAC5D,SAAOA,MAAK,SAAS,cAAc,SAAS,KAAKA,MAAK,SAAS,cAAc,YAAY,IAAI,YAAY;AAC3G;AAWA,eAAsB,yBACpB,SACA,SACkB;AAElB,MAAM,kBAAkB,MAAM,4BAA4B,SAAS,SAAS,gBAAgB;AAE5F,MAAI,gBAAgB,aAAa,gBAAgB,SAAS,gBAAgB,gBAAgB,SAAS,wBAAwB;AACzH,WAAO,KAAK,gCAAgC,gBAAgB,IAAI,yCAAyC,EAAE,QAAQ,CAAC;AACpH,QAAM,EAAE,SAAS,IAAI,IAAI,MAAM,yBAAyB,SAAS,OAAO;AACxE,WAAO;AAAA,EACT;AAMA,MAAI,gBAAgB,YAAY,gBAAgB,SAAS,eAAe;AACtE,QAAI,SAAS,cAAc;AACzB,aAAO,KAAK,mGAAmG;AAA
A,QAC7G;AAAA,QACA,cAAc,QAAQ;AAAA,MACxB,CAAC;AACD,UAAM,iBAAyC;AAAA,QAC7C,QAAQ;AAAA,QACR,MAAM,SAAS,eAAeC,UAAS,OAAO;AAAA,QAC9C,QAAQ;AAAA,MACV,GACM,EAAE,SAAS,IAAI,IAAI,MAAM,yBAAyB,SAAS;AAAA,QAC/D,GAAG;AAAA,QACH,kBAAkB;AAAA,MACpB,CAAC;AACD,aAAO;AAAA,IACT;AACA,UAAM,IAAI;AAAA,MACR,cAAc,OAAO;AAAA,IAEvB;AAAA,EACF;AAGA,MAAI,SAAS,MAAM,kBAAkB,OAAO;AAC5C,MAAI,CAAC,QAAQ;AAIX,QADmB,MAAM,iBAAiB,OAAO,GACjC;AACd,UAAM,iBAAyC;AAAA,QAC7C,QAAQ;AAAA,QACR,MAAM,SAAS,eAAeA,UAAS,OAAO;AAAA,QAC9C,QAAQ;AAAA;AAAA,MACV,GACM,qBAAqB,MAAM,4BAA4B,SAAS,cAAc;AACpF,UAAI,mBAAmB,YAAY,mBAAmB,SAAS,uBAAuB;AACpF,YAAM,EAAE,SAAS,IAAI,IAAI,MAAM,yBAAyB,SAAS;AAAA,UAC/D,GAAG;AAAA,UACH,kBAAkB;AAAA,QACpB,CAAC;AACD,eAAO;AAAA,MACT;AAAA,IACF;AAGA,QAAI,CAAC,SAAS;AACZ,YAAM,IAAI;AAAA,QACR,cAAc,OAAO,yEACV,cAAc,eAAe,OAAO,oBAAoB,eAAe;AAAA,MACpF;AAGF,QAAM,mBAAmBA,UAAS,OAAO;AAWzC,aAAS;AAAA,MACP,MAXmB,SAAS,SAC1B,0BAA0B;AAAA,QACxB,QAAQ,QAAQ;AAAA,QAChB,MAAM,QAAQ;AAAA,QACd,cAAc,QAAQ;AAAA,QACtB,aAAa;AAAA,QACb,UAAU,QAAQ;AAAA,MACpB,CAAC,IACD;AAAA,MAIF,SAAS;AAAA,IACX;AAAA,EACF;AAIA,MAAI,SAAS,QAAQ;AACnB,QAAM,eAAe,OAAO,MACtB,aAAa,0BAA0B;AAAA,MAC3C,QAAQ,QAAQ;AAAA,MAChB,MAAM,QAAQ;AAAA,MACd,cAAc,QAAQ;AAAA,MACtB,aAAa;AAAA;AAAA,MACb,UAAU,QAAQ;AAAA,IACpB,CAAC;AAGD,IAAI,eAAe,iBACjB,OAAO,OAAO;AAAA,EAElB;AAGA,MAAM,QAAuB,CAAC;AAE9B,MAAI;AACF,mBAAiB,YAAY,UAAU,SAAS,CAAC,GAAG,EAAE,aAAa,sBAAsB,CAAC,GAAG;AAC3F,UAAM,eAAeC,UAAS,SAAS,QAAQ;AAG/C,UAAIC,QAAOF,UAAS,YAAY,CAAC;AAC/B;AAGF,UAAM,UAAU,MAAM,aAAa,QAAQ;AAE3C,YAAM,KAAK;AAAA,QACT,MAAM;AAAA,QACN;AAAA,QACA,UAAU;AAAA,MACZ,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,MACL,UAAU;AAAA,MACV;AAAA,IACF;AAAA,EACF,SAAS,OAAO;AACd,iBAAO,MAAM,0CAA0C,OAAO,IAAI,EAAE,MAAM,CAAC,GACrE,IAAI,gBAAgB,0CAA0C,KAAK,EAAE;AAAA,EAC7E;AACF;AAMA,eAAsB,uBAAuB,aAAuC;AAElF,MAAI;AACJ,MAAI;AACF,oBAAgB,MAAMG,UAAS,WAAW;AAAA,EAC5C,SAAS,OAAO;AACd,UAAM,IAAI,gBAAgB,gCAAgC,WAAW,MAAM,KAAK,EAAE;AAAA,EACpF;AAGA,MAAM,YAAY,MAAM,0BAA0B,aAAa,GAGzD,iBAAiB,UAAU,MAAM;AAAA,IACrC,OAAK,EAAE,SAAS,cAAc,qBAAqB,EAAE,SAAS;AAAA,EAChE;AAEA,MAAI,CAAC;AACH,UAAM,IAAI;AAAA,MACR,YAAY,WA
AW,8BAA8B,cAAc,eAAe;AAAA,IACpF;AAIF,MAAM,SAAc,WAAK,eAAe,OAAO;AAE/C,MAAI,CAAC,OAAO;AACV,UAAM,IAAI;AAAA,MACR,YAAY,WAAW,sBAAsB,cAAc,eAAe;AAAA,IAC5E;AAGF,gBAAO,MAAM,kBAAkB,OAAO,IAAI,IAAI,OAAO,OAAO,kBAAkB,WAAW,EAAE,GAEpF;AAAA,IACL,UAAU;AAAA,IACV,OAAO,UAAU;AAAA,EACnB;AACF;AASA,eAAsB,oBACpBJ,OACA,SACkB;AAGlB,SAFmB,gBAAgBA,KAAI,MAEpB,YACV,MAAM,uBAAuBA,KAAI,IAEjC,MAAM,yBAAyBA,OAAM,OAAO;AAEvD;;;ANtMO,IAAM,iBAAN,MAAqB;AAAA;AAAA;AAAA;AAAA,EAM1B,MAAM,YACJ,aACA,SACA,MACkB;AAMlB,QALA,OAAO,MAAM,oBAAoB,WAAW,IAAI,EAAE,QAAQ,CAAC,GAE3D,oBAAoB,WAAW,GAG3B,SAAS;AACX,UAAM,0BAA0B,qBAAqB,aAAa,OAAO;AACzE,UAAI;AACF,sBAAO,MAAM,oCAAoC,WAAW,IAAI,OAAO,EAAE,GAClE,wBAAwB;AAAA,IAEnC;AAEA,QAAI,gBAA+B,MAAM,iBAAiB,WAAW,OAAO;AAE5E,QAAI,OAAM;AAEH,UAAI,YAAY;AACrB,wBAAgB;AAAA,eACP;AAET,YAAI,eAAe,OAAO;AACxB,0BAAgB;AAAA,aACX;AAEL,cAAM,oBAAoB,MAAM,oBAAoB,WAAW;AAC/D,cAAI,kBAAkB,WAAW;AAC/B,kBAAM,IAAI,qBAAqB,WAAW;AAI5C,cADA,gBAAgB,oBAAoB,SAAS,iBAAiB,GAC1D,CAAC;AACH,kBAAM,IAAI;AAAA,cACR,kBAAkB,WAAW,sBAAsB,OAAO,0BAA0B,kBAAkB,KAAK,IAAI,CAAC;AAAA,YAClH;AAEF,iBAAO,MAAM,2BAA2B,OAAO,SAAS,aAAa,kBAAkB,WAAW,GAAG;AAAA,QACvG;AAAA;AAGA,wBAAgB,MAAM,wBAAwB,WAAW;AAG3D,QAAI,CAAC,iBAAiB,CAAC,MAAM;AAC3B,YAAM,IAAI,qBAAqB,WAAW;AAG5C,QAAM,cAAc,MAAM,iBACtB,KAAK,iBACL,sBAAsB,aAAa,iBAAiB,MAAS;AACjE,QAAI,CAAE,MAAM,OAAO,WAAW;AAC5B,YAAM,IAAI,qBAAqB,WAAW;AAG5C,QAAI;AAGF,aADY,MAAM,oBAAoB,aAAa,EAAE,YAAY,CAAC;AAAA,IAEpE,SAAS,OAAO;AACd,YAAI,iBAAiB,wBAAyB,MAAc,SAAS,oBAC7D,IAAI,qBAAqB,WAAW,KAE5C,OAAO,MAAM,2BAA2B,WAAW,IAAI,EAAE,MAAM,CAAC,GAC1D,IAAI,oBAAoB,2BAA2B,KAAK,EAAE;AAAA,IAClE;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,YAAY,KAAc,UAA8B,CAAC,GAAkB;AAC/E,QAAM,EAAE,UAAU,MAAM,IAAI,KACtB,cAAc,sBAAsB,SAAS,MAAM,SAAS,OAAO;AAEzE,WAAO,MAAM,mBAAmB,SAAS,IAAI,IAAI,SAAS,WAAW,WAAW,IAAI,EAAE,YAAY,CAAC;AAEnG,QAAI;AAEF,YAAM,UAAU,WAAW;AAG3B,eAAW,QAAQ,OAAO;AACxB,YAAM,WAAWK,MAAK,aAAa,KAAK,IAAI;AAC5C,cAAM,UAAU,QAAQ,QAAQ,CAAC,GACjC,MAAM,cAAc,UAAU,KAAK,SAAU,KAAK,YAA+B,MAAM;AAAA,MACzF;AACA,MAAI,QAAQ,UACV,MAAM,KAAK,sBAAsB,WAAW,IAE5C,MAAM,KAAK,uBAAuB,WAAW,GAG/C,OAAO,KAAK,Y
AAY,SAAS,IAAI,IAAI,SAAS,OAAO,sBAAsB;AAAA,IACjF,SAAS,OAAO;AACd,mBAAO,MAAM,2BAA2B,SAAS,IAAI,IAAI,SAAS,OAAO,IAAI,EAAE,MAAM,CAAC,GAChF,IAAI,oBAAoB,2BAA2B,KAAK,EAAE;AAAA,IAClE;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,qBAAqB,aAAqB,SAAgC;AAC9E,WAAO,KAAK,6BAA6B,WAAW,IAAI,OAAO,EAAE,GAEjE,oBAAoB,WAAW;AAE/B,QAAM,cAAc,sBAAsB,aAAa,OAAO;AAE9D,QAAI,CAAE,MAAM,OAAO,WAAW;AAC5B,YAAM,IAAI,qBAAqB,GAAG,WAAW,IAAI,OAAO,EAAE;AAG5D,QAAI;AACF,YAAM,OAAO,WAAW,GACxB,OAAO,KAAK,oBAAoB,WAAW,IAAI,OAAO,wBAAwB;AAAA,IAChF,SAAS,OAAO;AACd,mBAAO,MAAM,qCAAqC,WAAW,IAAI,OAAO,IAAI,EAAE,MAAM,CAAC,GAC/E,IAAI,oBAAoB,qCAAqC,KAAK,EAAE;AAAA,IAC5E;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,cAAc,aAAoC;AACtD,WAAO,KAAK,qCAAqC,WAAW,EAAE,GAE9D,oBAAoB,WAAW;AAE/B,QAAM,cAAc,eAAe,WAAW;AAE9C,QAAI,CAAE,MAAM,OAAO,WAAW;AAC5B,YAAM,IAAI,qBAAqB,WAAW;AAG5C,QAAI;AACF,YAAM,OAAO,WAAW,GACxB,OAAO,KAAK,4BAA4B,WAAW,wBAAwB;AAAA,IAC7E,SAAS,OAAO;AACd,mBAAO,MAAM,6BAA6B,WAAW,IAAI,EAAE,MAAM,CAAC,GAC5D,IAAI,oBAAoB,6BAA6B,KAAK,EAAE;AAAA,IACpE;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,cAAc,aAAuC;AACzD,+BAAoB,WAAW,GACT,MAAM,wBAAwB,WAAW,MACtC;AAAA,EAC3B;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,uBAAuB,aAAqB,SAAgD;AAChG,QAAM,cAAc,sBAAsB,aAAa,OAAO;AAG9D,QAAI,CAFkB,MAAM,OAAO,WAAW;AAG5C,aAAO,EAAE,QAAQ,IAAO,WAAW,IAAO,OAAO,CAAC,EAAE;AAGtD,QAAM,eAAeA,MAAK,aAAa,cAAc,iBAAiB,GAChE,iBAAiB,MAAM,OAAO,YAAY,GAC5C,YAAY,CAAC;AACjB,QAAI;AACF,UAAI;AAEF,oBAAY,GADK,MAAM,gBAAgB,YAAY,GACb;AAAA,MACxC,SAAS,OAAO;AACd,eAAO,KAAK,qDAAqD,EAAE,aAAa,SAAS,MAAM,CAAC,GAChG,YAAY;AAAA,MACd;AAGF,QAAM,QAAQ,MAAM,KAAK,qBAAqB,WAAW;AAEzD,WAAO,EAAE,QAAQ,IAAM,WAAW,MAAM;AAAA,EAC1C;AAAA;AAAA;AAAA;AAAA,EAMA,MAAc,qBAAqB,aAA6C;AAC9E,QAAM,QAAuB,CAAC;AAE9B,QAAI;AAGF,qBAAiB,YAAY,UAAU,aAAa,CAAC,GAAG,EAAE,aAAa,sBAAsB,CAAC,GAAG;AAC/F,YAAM,eAAeC,UAAS,aAAa,QAAQ;AAGnD,YAAIC,QAAOC,UAAS,YAAY,CAAC;AAC/B;AAGF,YAAM,UAAU,MAAM,aAAa,QAAQ;AAE3C,cAAM,KAAK;AAAA,UACT,MAAM;AAAA,UACN;AAAA,UACA,UAAU;AAAA,QACZ,CAAC;AAAA,MACH;AAEA,oBAAO,MAAM,cAAc,MAAM,MAAM,+BAA+B,EAAE,YAAY,CAAC,GAC9E;AAAA,IACT,SAAS,OAAO;AACd,mBAAO,MAAM,kDAAkD,WAAW,IAAI,EAAE,M
AAM,CAAC,GACjF,IAAI,oBAAoB,qCAAqC,KAAK,EAAE;AAAA,IAC5E;AAAA,EACF;AAAA,EAEA,MAAc,sBAAsB,aAAoC;AACtE,QAAM,eAAeH,MAAK,aAAa,cAAc,iBAAiB;AACtE,QAAI,CAAE,MAAM,OAAO,YAAY;AAC7B;AAGF,QAAM,WAAW,MAAM,gBAAgB,YAAY;AACnD,IAAC,SAAiB,UAAU,IAC5B,MAAM,gBAAgB,cAAc,QAAQ;AAAA,EAC9C;AAAA,EAEA,MAAc,uBAAuB,aAAoC;AACvE,QAAM,eAAeA,MAAK,aAAa,cAAc,iBAAiB;AACtE,QAAI,CAAE,MAAM,OAAO,YAAY;AAC7B;AAGF,QAAM,WAAW,MAAM,gBAAgB,YAAY;AACnD,IAAK,SAAiB,YAAY,WAChC,OAAQ,SAAiB,SACzB,MAAM,gBAAgB,cAAc,QAAQ;AAAA,EAEhD;AAAA,EAEA,MAAc,qBAAqB,aAAwC;AACzE,QAAM,QAAkB,CAAC;AACzB,mBAAiB,YAAY,UAAU,aAAa,CAAC,GAAG,EAAE,aAAa,sBAAsB,CAAC,GAAG;AAC/F,UAAM,eAAeC,UAAS,aAAa,QAAQ;AACnD,MAAIC,QAAOC,UAAS,YAAY,CAAC,KAGjC,MAAM,KAAK,YAAY;AAAA,IACzB;AACA,iBAAM,KAAK,GACJ;AAAA,EACT;AACF,GAGa,iBAAiB,IAAI,eAAe;;;AJlSjD,eAAsB,gCAAgC,WAAkC;AACtF,MAAM,iBAAiB,uBAAuB,SAAS,GACjD,cAAc,oBAAoB,SAAS;AAEjD,QAAM,QAAQ,IAAI;AAAA,IAChB,UAAU,cAAc;AAAA,IACxB,UAAU,WAAW;AAAA,EACvB,CAAC;AACH;AAQA,eAAsB,0BAA0B,WAAmB,QAAiB,IAAO,QAAiD;AAC1I,MAAM,MAAM,UAAU,cAAc;AACpC,QAAM,gCAAgC,SAAS;AAE/C,MAAM,iBAAiB,uBAAuB,SAAS,GACjD,cAAcC,UAAS,SAAS,GAChC,kBAA8B;AAAA,IAClC,MAAM;AAAA,IACN,cAAc,CAAC;AAAA,IACf,oBAAoB,CAAC;AAAA,EACvB;AAEA,SAAI,MAAM,OAAO,cAAc,IACxB,SAGL,MAAM,gBAAgB,gBAAgB,eAAe,GACrD,OAAO,KAAK,8CAA8C,WAAW,EAAE,GACvE,IAAI,QAAQ,+DAA+D,WAAW,EAAE,GACjF,mBALE,QAQX,MAAM,gBAAgB,gBAAgB,eAAe,GACrD,OAAO,KAAK,uCAAuC,GACnD,IAAI,QAAQ,wDAAwD,GAC7D;AACT;AAoHA,eAAsB,gBACpB,WACA,aACA,gBACA,QAAiB,IACjB,iBACA,SAAkB,IAClB,MACA,KACA,KACA,cACA,QACe;AACf,MAAM,MAAM,UAAU,cAAc,GAC9B,iBAAiB,uBAAuB,SAAS;AAEvD,MAAI,CAAE,MAAM,OAAO,cAAc;AAC/B;AAIF,MAAI,MAAM,cAAc,WAAW,WAAW,GAAG;AAC/C,WAAO,MAAM,sCAAsC,WAAW,mCAAmC;AACjG;AAAA,EACF;AAEA,MAAM,SAAS,MAAM,gBAAgB,cAAc;AACnD,EAAK,OAAO,iBAAc,OAAO,eAAe,CAAC,IAC5C,OAAO,kBAAkB,gBAAgB,MAAG,OAAO,kBAAkB,gBAAgB,IAAI,CAAC;AAE/F,MAAM,wBAAwB,qBAAqB,WAAW,GACxD,kBAAkB,iBAAiB,GAAG,WAAW,IAAI,cAAc,KAAK,aACxE,oBAAoB,OAAO,cAC3B,uBAAuB,OAAO,kBAAkB,gBAAgB,GAEhE,YAAY,CAAC,QACjB,IAAI,UAAU,SAAO,0BAA0B,IAAI,MAAM,qBAAqB,CAAC,GAE7E,kBAA8D,MAC9D,gBAAgB,UAAU,iBAAiB;AAC/C,EAAI,iBAAiB,IAC
nB,kBAAkB,kBAAkB,gBAEpC,gBAAgB,UAAU,oBAAoB,GAC1C,iBAAiB,IACnB,kBAAkB,kBAAkB,mBAEpC,gBAAgB;AAIpB,MAAM,gBACJ,mBAAmB,iBAAiB,IAChC,OAAO,eAAe,EAAG,aAAa,GAAG,UACzC,QAEA,oBAAoB,qBAAqB,cAAc,KAAK,qBAAqB,eAAe,GAClG,iBAAqC,OAAkB,oBAAZ,SAA4C;AAE3F,MAAI,CAAC,OAAO,CAAC,qBAAqB,gBAAgB;AAChD,QAAM,cAAc,mBAAmB,cAAc,GAC/C,eAAe,iBAAiB,WAAW;AAGjD,QAFA,iBAAiB,mBAAmB,cAEhC,CAAC,mBAAmB,eAAe;AACrC,UAAM,sBAAsB,4BAA4B,aAAa,GAC/D,qBAAqB,CAAC,oBAAoB,cAAc;AAE9D,MAAI,sBACE,sBAGF,iBAAiB,iBAAiB,WAAW,GAC7C,OAAO;AAAA,QACL,6CAA6C,aAAa,gBAAgB,cAAc,SACjF,WAAW,wBAAwB,cAAc;AAAA,MAC1D,KAIA,iBAAiB,gBAEV,qBAAqB,eAAe,WAAW,IAGxD,iBAAiB,gBAIjB,iBAAiB;AAAA,IAErB;AAAA,EACF;AAGA,MAAI;AACJ,EAAI,QACF,WAAW,MAAM,GAAG,GAAG,IAAI,GAAG,KAAK;AAGrC,MAAM,aAAgC;AAAA,IACpC,MAAM;AAAA,IACN,GAAI,iBAAiB,EAAE,SAAS,eAAe,IAAI,CAAC;AAAA,IACpD,GAAI,OAAO,EAAE,KAAK,IAAI,CAAC;AAAA;AAAA,IACvB,GAAI,WAAW,EAAE,KAAK,SAAS,IAAI,CAAC;AAAA;AAAA,IACpC,GAAI,eAAe,EAAE,MAAM,aAAa,IAAI,CAAC;AAAA;AAAA,EAC/C,GAII;AACJ,EAAI,oBAAoB,kBAAkB,oBAAoB,CAAC,SAC7D,cAAc,kBAAkB,kBAChC,OAAO,KAAK,wCAAwC,eAAe,EAAE,KAC5D,oBAAoB,kBAAkB,gBAAgB,SAC/D,cAAc,kBAAkB,kBAChC,OAAO,KAAK,yDAAyD,eAAe,EAAE,KAEtF,cAAc,QAAQ,kBAAkB,mBAAmB,kBAAkB,cAI3E,mBAAmB,oBAAoB,eAAe,iBAAiB,MACzE,OAAO,eAAe,EAAG,OAAO,eAAe,CAAC,GAChD,gBAAgB,IAChB,kBAAkB;AAIpB,MAAM,iBAAiB,OAAO,WAAW,GACnC,sBACJ,oBAAoB,cAAc,UAAU,cAAc,IAAI;AAEhE,EAAI,uBAAuB,IACI,eAAe,mBAAmB,EACnB,YAAY,WAAW,YAEjE,eAAe,mBAAmB,IAAI,YACjC,WACH,OAAO,KAAK,wCAAwC,eAAe,EAAE,GACrE,IAAI,QAAQ,WAAW,eAAe,0BAA0B,OAIpE,eAAe,KAAK,UAAU,GACzB,WACH,OAAO,KAAK,iCAAiC,eAAe,EAAE,GAC9D,IAAI,QAAQ,SAAS,eAAe,0BAA0B,KAIlE,MAAM,gBAAgB,gBAAgB,MAAM;AAC9C;AAcA,SAAS,+BACP,KACA,eACS;AAET,MAAM,oBAAoB,8BAA8B,IAAI,IAAI,GAC1D,qBAAqB,8BAA8B,aAAa;AAGtE,MAAI,sBAAsB;AACxB,WAAO;AAIT,MAAM,YAAY,IAAI,OAAO,IAAI;AACjC,MAAI,WAAW;AACb,QAAM,aAAa,kBAAkB,SAAS;AAC9C,QAAI,CAAC;AACH,aAAO;AAGT,QAAM,EAAE,UAAU,KAAK,IAAI,YAGrB,aAAa,IAAI,SAAS,IAAI,cAAc,WAAW,IAAI,IAC7D,IAAI,aAAa,UAAU,CAAC,IAC5B,IAAI,eAGF,gBAAgB;AAAA,MACpB,GAAG,QAAQ,IAAI,IAAI;AAAA,MACnB,IAAI,QAAQ,IAAI,IAAI;AAAA,MACpB,MAAM,QAAQ,IAAI,IAAI
;AAAA,IACxB;AAEA,QAAI,YAAY;AACd,oBAAc;AAAA,QACZ,GAAG,QAAQ,IAAI,IAAI,IAAI,UAAU;AAAA,QACjC,IAAI,QAAQ,IAAI,IAAI,IAAI,UAAU;AAAA,QAClC,MAAM,QAAQ,IAAI,IAAI,IAAI,UAAU;AAAA,MACtC;AAIA,UAAM,eAAe,WAAW,MAAM,GAAG,EAAE,IAAI;AAC/C,MAAI,gBAAgB,iBAAiB,cACnC,cAAc;AAAA,QACZ,GAAG,QAAQ,IAAI,IAAI,IAAI,YAAY;AAAA,QACnC,IAAI,QAAQ,IAAI,IAAI,IAAI,YAAY;AAAA,QACpC,MAAM,QAAQ,IAAI,IAAI,IAAI,YAAY;AAAA,MACxC;AAAA,IAEJ;AAGA,QAAM,kBAAkB,qBAAqB,aAAa;AAC1D,aAAW,gBAAgB;AACzB,UAAI,qBAAqB,YAAY,MAAM;AACzC,eAAO;AAAA,EAGb;AAEA,SAAO;AACT;AAeA,eAAsB,6BACpB,cACA,gBAC6C;AAC7C,MAAI,CAAE,MAAM,OAAO,YAAY,EAAI,QAAO,EAAE,SAAS,GAAM;AAE3D,MAAI;AACF,QAAM,SAAS,MAAM,gBAAgB,YAAY,GAC3C,WAAuD,CAAC,kBAAkB,cAAc,kBAAkB,gBAAgB,GAC5H,UAAU,IACV,oBACA,qBAAqB;AAEzB,aAAW,WAAW,UAAU;AAC9B,UAAM,MAAM,OAAO,OAAO;AAC1B,UAAI,CAAC,IAAK;AACV,MAAI,IAAI,SAAS,MAAG,qBAAqB;AAIzC,UAAM,OAAO,IAAI,OAAO,SAAO,CAAC,+BAA+B,KAAK,cAAc,CAAC;AAEnF,MAAI,KAAK,WAAW,IAAI,WACtB,OAAO,OAAO,IAAI,MAClB,UAAU,IACV,qBAAqB;AAAA,IAEzB;AAKA,YAAI,WAAW,uBACb,MAAM,gBAAgB,cAAc,MAAM,GAErC,EAAE,SAAS,SAAS,mBAAmB;AAAA,EAChD,SAAS,OAAO;AACd,kBAAO,KAAK,kDAAkD,cAAc,KAAK,KAAK,EAAE,GACjF,EAAE,SAAS,GAAM;AAAA,EAC1B;AACF;AAMA,eAAsB,gCACpB,WACA,aACkB;AAClB,MAAM,iBAAiB,uBAAuB,SAAS;AAEvD,UADe,MAAM,6BAA6B,gBAAgB,WAAW,GAC/D;AAChB;AAUA,eAAsB,iCACpB,cACA,WACwB;AACxB,MAAI,CAAE,MAAM,OAAO,YAAY,EAAI,QAAO;AAE1C,MAAI;AACF,QAAM,SAAS,MAAM,gBAAgB,YAAY,GAC3C,WAAuD,CAAC,kBAAkB,cAAc,kBAAkB,gBAAgB;AAEhI,aAAW,WAAW,UAAU;AAC9B,UAAM,MAAM,OAAO,OAAO;AAC1B,UAAI,CAAC,IAAK;AAEV,UAAM,QAAQ,IAAI,KAAK,SAAO,+BAA+B,KAAK,SAAS,CAAC;AAC5E,UAAI,MAAO,QAAO,MAAM;AAAA,IAC1B;AACA,WAAO;AAAA,EACT,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAEA,SAAS,qBAAqB,OAAe,SAA0B;AACrE,MAAI,CAAC,SAAS,CAAC;AACb,WAAO;AAET,MAAI;AACF,WAAOC,QAAO,UAAU,SAAS,OAAO,EAAE,mBAAmB,GAAK,CAAC;AAAA,EACrE,QAAQ;AACN,WAAO;AAAA,EACT;AACF;;;AHlfA,eAAsB,4BACpB,aACA,aACA,SAC8B;AAC9B,MAAM,SAAwB;AAAA,IAC5B,MAAM;AAAA,IACN;AAAA,IACA,SAAS,QAAQ;AAAA,IACjB,cAAc,QAAQ;AAAA,EACxB;AAEA,SAAO;AAAA,IACL,WAAW;AAAA,IACX,WAAW,YAAY;AAAA,IACvB;AAAA,IACA,MAAM;AAAA,IACN;AAAA,IACA,WAAW,mBAAmB,QAAQ,SAAS,KAAK,
CAAC;AAAA,IACrD,cAAc;AAAA,IACd,kBAAkB,CAAC;AAAA,IACnB,UAAU,CAAC;AAAA,IACX,QAAQ,CAAC;AAAA,EACX;AACF;AAKA,eAAsB,wBACpB,aACA,YACA,SAC8B;AAG9B,MAAM,SAAwB;AAAA,IAC5B,MAAM;AAAA,IACN,aAAa;AAAA;AAAA,IACb,WAAW;AAAA,IACX,YAAY,QAAQ;AAAA,EACtB;AAEA,SAAO;AAAA,IACL,WAAW;AAAA,IACX,WAAW,YAAY;AAAA,IACvB;AAAA,IACA,MAAM;AAAA,IACN;AAAA,IACA,WAAW,mBAAmB,QAAQ,SAAS,KAAK,CAAC;AAAA,IACrD,cAAc;AAAA,IACd,kBAAkB,CAAC;AAAA,IACnB,UAAU,CAAC;AAAA,IACX,QAAQ,CAAC;AAAA,EACX;AACF;AAKA,eAAsB,uBACpB,aACA,QACA,SAC8B;AAC9B,MAAM,SAAwB;AAAA,IAC5B,MAAM;AAAA,IACN,aAAa;AAAA;AAAA,IACb;AAAA,IACA,QAAQ,QAAQ;AAAA,IAChB,SAAS,QAAQ;AAAA,EACnB;AAEA,SAAO;AAAA,IACL,WAAW;AAAA,IACX,WAAW,YAAY;AAAA,IACvB;AAAA,IACA,MAAM;AAAA,IACN;AAAA,IACA,WAAW,mBAAmB,QAAQ,SAAS,KAAK,CAAC;AAAA,IACrD,cAAc;AAAA,IACd,kBAAkB,CAAC;AAAA,IACnB,UAAU,CAAC;AAAA,IACX,QAAQ,CAAC;AAAA,EACX;AACF;AAMA,eAAsB,iCACpB,aACA,SACA,OAA4B,WACS;AACrC,MAAM,MAAM,YAAY;AAGxB,QAAM,gCAAgC,GAAG,GAGzC,MAAM,0BAA0B,GAAG;AAEnC,MAAM,iBAAiB,uBAAuB,GAAG,GAC3C,iBAAiB,uBAAuB,GAAG;AAGjD,MAAI,CAAE,MAAM,OAAO,cAAc;AAC/B,WAAO;AAIT,MAAI;AACJ,MAAI;AACF,aAAS,MAAM,gBAAgB,cAAc;AAAA,EAC/C,SAAS,OAAO;AACd,kBAAO,KAAK,sCAAsC,KAAK,EAAE,GAClD;AAAA,EACT;AAKA,MAAM,SAAwB;AAAA,IAC5B,MAAM;AAAA,IACN,aAJkB,OAAO,QAAQC,UAAS,GAAG;AAAA,IAK7C,SAAS,OAAO;AAAA,IAChB,aAAa;AAAA,EACf;AAEA,SAAO;AAAA,IACL,WAAW;AAAA,IACX,WAAW,YAAY;AAAA,IACvB;AAAA,IACA;AAAA,IACA,SAAS,SAAS,UAAU,EAAE,GAAG,SAAS,OAAO,GAAK,IAAI;AAAA,IAC1D,WAAW,mBAAmB,QAAQ,SAAS,KAAK,CAAC;AAAA,IACrD,cAAc;AAAA,IACd,kBAAkB,CAAC;AAAA,IACnB,UAAU,CAAC;AAAA,IACX,QAAQ,CAAC;AAAA,EACX;AACF;AAOA,eAAsB,oBACpB,aACA,cACA,SACkF;AAElF,MAAI,CAAC;AACH,WAAO,yBAAyB,aAAa,OAAO;AAItD,MAAM,iBAAiB,MAAM,qBAAqB,cAAc,YAAY,SAAS;AAErF,UAAQ,eAAe,MAAM;AAAA,IAC3B,KAAK;AACH,aAAO,4BAA4B,aAAa,eAAe,MAAO,OAAO;AAAA,IAE/E,KAAK;AAAA,IACL,KAAK;AACH,aAAO,wBAAwB,aAAa,eAAe,cAAe;AAAA,QACxE,GAAG;AAAA,QACH,YAAY,eAAe;AAAA,MAC7B,CAAC;AAAA,IAEH,KAAK;AACH,aAAO,uBAAuB,aAAa,eAAe,QAAS;AAAA,QACjE,GAAG;AAAA,QACH,QAAQ,eAAe;AAAA,QACvB,SAAS,eAAe;AAAA,MAC1B,CAAC;AAAA,IAEH;AACE,YAAM,IAAI,MAAM,+BAA+B,eAAe,IAAI,EAAE;AAAA,EAC
xE;AACF;AAOA,eAAe,yBACb,aACA,SACoC;AACpC,MAAM,MAAM,YAAY,WAGlB,mBAAmB,MAAM,iCAAiC,aAAa,SAAS,SAAS;AAG/F,QAAM,0BAA0B,GAAG;AAGnC,MAAM,cAAc,uBAAuB,GAAG,GACxC,UAAU,MAAM,gBAAgB,WAAW,GAE3C,OAAS,QAAgB,YAAa,QAAgB,gBAAgB,CAAC,GACvE,UAAa,QAAgB,mBAAoB,QAAgB,kBAAkB,KAAK,CAAC,GACzF,kBAAkB,CAAC,GAAG,MAAM,GAAG,OAAO,EAAE,OAAO,OAAO,EAAE,SAAS;AAEvE,SAAO,EAAE,kBAAkB,oBAAoB,MAAM,gBAAgB;AACvE;AAsEA,SAAS,4BACP,cACA,UACA,UACoB;AACpB,MAAM,uBAAuBC,MAAK,UAAU,aAAa,YAAY,GAC/D,iBAAiBC,UAAS,UAAU,oBAAoB,EAC3D,QAAQ,OAAO,GAAG,EAClB,QAAQ,UAAU,EAAE;AAEvB,MAAK;AAIL,WAAI,aAAa,iBAAiB,cAEzB,GADY,eAAe,QAAQ,OAAO,EAAE,CAC/B,QAGf;AACT;AAMO,SAAS,uCACd,UACA,UACuB;AACvB,SAAO,SAAS,IAAI,SACd,GAAG,OAAO,SAAS,WACrB,GAAG,OAAO,YAAY,WAEjB,GACR;AACH;AAUO,SAAS,6BACd,aACA,eACA,UACuB;AACvB,MAAM,eAAe,YAAY,gBAAgB,YAAY,OAAO,eAAe,YAAY,WACzF,eAAe,YAAY,iBAAiBA,UAAS,UAAU,YAAY,KAAK;AAEtF,SAAO,cAAc,IAAI,UAAQ;AAC/B,QAAM,gBAAgB,YAAY,gBAAgB,KAAK,UACjD,iBAAiB,4BAA4B,MAAM,UAAU,aAAa,KAAK,YAAY,gBAI3F,eAAe,GACnB,kBACA,CAAC,eAAe,SAAS,GAAG,KAC5B,CAAC,eAAe,SAAS,GAAG,KAC5B,CAAC,eAAe,SAAS,GAAG,IAExB,WAAW,YAAY,OAAO,aAC9B,aAAa,eAAe,GAAG,QAAQ,IAAI,cAAc,KAAK,UAE9D,SAAwB;AAAA,MAC5B,GAAG,YAAY;AAAA,MACf,aAAa;AAAA,MACb,cAAc,KAAK;AAAA,MACnB,iBAAiB,KAAK;AAAA,IACxB,GAEI,mBAAmB,YAAY;AACnC,WAAI,gBAAgB,YAAY,iBAAiB,SAAS,IACxD,mBAAmB,YAAY,iBAAiB,IAAI,UAAQ;AAAA,MAC1D,GAAG;AAAA,MACH,MAAM,IAAI,SAAS,aAAa,IAAI;AAAA,IACtC,EAAE,IACO,YAAY,iBAAiB,WAAW,MACjD,mBAAmB,CAAC,IAGf;AAAA,MACL,GAAG;AAAA,MACH;AAAA,MACA;AAAA,MACA,UAAU,CAAC;AAAA,MACX,QAAQ,CAAC;AAAA,MACT,cAAc;AAAA,MACd,cAAc,iBAAiB,KAAK,MAAM;AAAA,MAC1C,YAAY,YAAY;AAAA,MACxB;AAAA,MACA,cAAc;AAAA,IAChB;AAAA,EACF,CAAC;AACH;;;AezTO,IAAM,kBAAN,cAA8B,MAAM;AAAA,EACzC,YACS,QACP,SACO,OACP;AACA,UAAM,OAAO;AAJN;AAEA;AAGP,SAAK,OAAO;AAAA,EACd;AACF;;;ACtGA,OAAO,QAAQ;AACf,OAAO,UAAU;AAQjB,OAAOC,aAAY;AAiBnB,SAAS,2BAA2B,aAA6B;AAC/D,MAAM,iBAAiB,qBAAqB,WAAW,GACjD,cAAc,eAAe,MAAM,oBAAoB,GACvD,UAAU,KAAK,KAAK,GAAG,QAAQ,GAAG,aAAa,aAAa,iBAAiB,QAAQ;AAE3F,MAAI,aAAa;AACf,QAAM,CAAC,EAAE,OAAO,SAAS,IAAI;AAC7B,WAAO,KAAK,KAAK,SAAS,IAAI,KAAK,IAAI,SAAS;AAAA
,EAClD;AAEA,SAAO,KAAK,KAAK,SAAS,cAAc;AAC1C;AAEA,eAAe,yBAAyB,gBAAgD;AACtF,MAAM,eAAe,KAAK,KAAK,gBAAgB,iBAAiB;AAChE,MAAI,CAAE,MAAM,OAAO,YAAY;AAC7B,WAAO;AAGT,MAAI;AAEF,YADiB,MAAM,gBAAgB,YAAY,GACnC,WAAW;AAAA,EAC7B,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAEA,eAAsB,6BACpB,KACA,aACmC;AACnC,MAAM,iBAAiB,mBAAmB,KAAK,WAAW,GACpD,UAAU,MAAM,yBAAyB,cAAc;AAC7D,SAAK,UAIE,EAAE,MAAM,oBAAoB,gBAAgB,QAAQ,IAHlD;AAIX;AAEA,eAAsB,0BACpB,aACmC;AACnC,MAAM,iBAAiB,2BAA2B,WAAW,GACvD,UAAU,MAAM,yBAAyB,cAAc;AAC7D,SAAK,UAIE,EAAE,MAAM,iBAAiB,gBAAgB,QAAQ,IAH/C;AAIX;AAEA,eAAsB,mCAAmC,MAIpB;AACnC,MAAM,EAAE,KAAK,aAAa,KAAK,IAAI;AAEnC,MAAI,SAAS;AACX,WAAO,EAAE,eAAe,CAAC,EAAE;AAG7B,MAAM,kBAAkB,MAAM,6BAA6B,KAAK,WAAW;AAC3E,MAAI;AACF,WAAO;AAAA,MACL,eAAe,CAAC,gBAAgB,OAAO;AAAA,MACvC,YAAY,gBAAgB;AAAA,MAC5B,aAAa,gBAAgB;AAAA,IAC/B;AAGF,MAAM,gBAAgB,MAAM,0BAA0B,WAAW;AACjE,SAAI,gBACK;AAAA,IACL,eAAe,CAAC,cAAc,OAAO;AAAA,IACrC,YAAY,cAAc;AAAA,IAC1B,aAAa,cAAc;AAAA,EAC7B,IAIK;AAAA,IACL,eAFuB,MAAM,oBAAoB,WAAW;AAAA,IAG5D,YAAY;AAAA,EACd;AACF;AAIA,IAAI;AAEJ,eAAsB,0BAA0B,MAI5B;AAClB,SAAK,4BACH,2BAA2B,MAAM,OAAO,wCAAkD,GAAG,0BAE9E,MAAM,wBAAwB,KAAK,aAAa,KAAK,KAAK,EAAE,SAAS,KAAK,QAAQ,CAAC,GACpF;AAClB;;;AChIA,OAAOC,WAAU;;;ACAjB,YAAYC,aAAY;;;ACAxB,YAAYC,WAAU;;;ACOf,SAAS,qBAAqBC,OAAsB;AACzD,MAAM,UAAUA,MAAK,WAAW,GAAG,IAAIA,MAAK,MAAM,CAAC,IAAIA;AACvD,SAAO,2BAA2B,OAAO;AAC3C;AAKO,SAAS,eAAeA,OAAuB;AACpD,MAAM,aAAa,qBAAqBA,KAAI;AAC5C,SACE,eAAe,cAAc,mBAC7B,eAAe,cAAc;AAEjC;;;AC+CO,SAASC,sBAAqB,KAA2B;AAC9D,MAAM,YAAY,IAAI,IAAI,qBAAyB,GAAG,CAAC;AACvD,mBAAU,IAAI,cAAc,SAAS,GACrC,UAAU,IAAI,cAAc,SAAS,GACrC,UAAU,IAAI,cAAc,SAAS,GACrC,UAAU,IAAI,cAAc,OAAO,GACnC,UAAU,IAAI,cAAc,OAAO,GAC5B;AACT;AAEO,SAAS,mBAAmB,UAAkB,KAAuB;AAC1E,SAAOA,sBAAqB,GAAG,EAAE,IAAI,QAAQ;AAC/C;;;ACrEO,SAAS,sBAAsB,cAA8B;AAClE,SAAO,2BAA2B,YAAY;AAChD;AAEO,SAAS,mBAAmB,cAA+B;AAEhE,MAAM,WADa,sBAAsB,YAAY,EACzB,MAAM,GAAG,EAAE,IAAI;AAC3C,SAAO,CAAC,CAAC,YAAY,mBAAmB,QAAQ;AAClD;AAEO,SAAS,wBAAwB,cAAsB,KAAuB;AACnF,MAAM,aAAa,sBAAsB,YAAY;AAGrD,MAAI,eAAe,UAAU;AAC3B,WAAO;AAIT,MAAI,CAAC,WAAW,SAAS,cAAc,QAAQ;AAC7C,WA
AO;AAGT,MAAM,WAAW,WAAW,MAAM,GAAG,EAAE,IAAI;AAC3C,MAAI,CAAC;AACH,WAAO;AAGT,MAAM,QAAQ,SAAS,MAAM,GAAG;AAChC,MAAI,MAAM,SAAS;AACjB,WAAO;AAGT,MAAM,mBAAmB,MAAM,MAAM,SAAS,CAAC;AAC/C,SAAO,aAAa,gBAAgB;AACtC;AAUO,SAAS,sBAAsB,cAAsB,KAAuB;AACjF,MAAM,aAAa,sBAAsB,YAAY;AAIrD,SAAI,mBAAmB,UAAU,IAAU,KAGvC,wBAAwB,YAAY,GAAG,IAAU,KAG9C,wBAAwB,YAAY,GAAG;AAChD;AAcO,SAAS,2BACd,cACA,KACqD;AACrD,MAAM,aAAa,sBAAsB,YAAY;AAGrD,MAAI,CAAC,wBAAwB,YAAY,GAAG;AAC1C,WAAO;AAIT,MAAM,QAAQ,WAAW,MAAM,GAAG,GAC5B,iBAAiB,MAAM,CAAC;AAG9B,MAAI,CAAC,kBAAkB,eAAe,SAAS,GAAG;AAChD,WAAO;AAIT,MAAM,UAAU,MAAM,MAAM,CAAC,EAAE,KAAK,GAAG;AACvC,SAAO;AAAA,IACL,iBAAiB;AAAA,IACjB;AAAA,EACF;AACF;;;AC5GA,YAAYC,WAAU;AAMtB,IAAM,gBAAgB,2BAA2B,cAAc,cAAc;AAQtE,SAAS,kBAAkB,MAAqB,UAAwC;AAC7F,MAAM,SAAS,oBAAI,IAAyB,GAEtC,SAAS,CAAC,SAAwB;AACtC,aAAW,QAAQ,MAAM;AACvB,UAAM,aAAa,2BAA2B,KAAK,IAAI,KAAK,KAAK;AACjE,MAAI,eAAe,iBACnB,OAAO,IAAI,YAAY,EAAE,GAAG,MAAM,MAAM,WAAW,CAAC;AAAA,IACtD;AAAA,EACF;AAEA,gBAAO,IAAI,GACX,OAAO,QAAQ,GAER,MAAM,KAAK,OAAO,OAAO,CAAC;AACnC;;;AC7BA,SAAS,WAAAC,gBAAe;AACxB,SAAS,QAAAC,aAAY;;;ACDrB,SAAS,cAAAC,mBAAkB;AAC3B,SAAS,QAAAC,OAAM,YAAAC,iBAAgB;AAC/B,SAAS,eAAe;AAyCjB,SAAS,kBAAkB,KAAqB;AACrD,MAAM,aAAa,gBAAgB,GAAG;AAItC,SAHaC,YAAW,QAAQ,EAAE,OAAO,UAAU,EAAE,OAAO,KAAK,EAGrD,UAAU,GAAG,EAAE;AAC7B;AAMO,SAAS,iBAAyB;AACvC,SAAOC,MAAK,QAAQ,GAAG,gBAAgB,SAAS,KAAK;AACvD;AAMO,SAAS,mBAAmB,KAAqB;AACtD,MAAM,UAAU,kBAAkB,GAAG,GAC/B,WAAW,eAAe;AAChC,SAAOA,MAAK,UAAU,OAAO;AAC/B;AAMO,SAAS,qBAAqB,KAAa,WAA2B;AAC3E,MAAM,UAAU,mBAAmB,GAAG,GAChC,WAAW,UAAU,UAAU,GAAG,CAAC;AACzC,SAAOA,MAAK,SAAS,QAAQ;AAC/B;AAuBA,SAAS,oBAAoB,SAAyB;AACpD,SAAOC,MAAK,SAAS,iBAAiB;AACxC;AAKA,SAAS,sBAAsB,WAA2B;AACxD,SAAOA,MAAK,WAAW,mBAAmB;AAC5C;AAKA,eAAsB,kBACpB,SACA,UACe;AACf,MAAM,WAAW,oBAAoB,OAAO;AAC5C,QAAM,UAAU,OAAO,GACvB,MAAM,cAAc,UAAU,KAAK,UAAU,UAAU,MAAM,CAAC,CAAC;AACjE;AA0BA,eAAsB,oBACpB,WACA,UACe;AACf,MAAM,WAAW,sBAAsB,SAAS;AAChD,QAAM,UAAU,SAAS,GACzB,MAAM,cAAc,UAAU,KAAK,UAAU,UAAU,MAAM,CAAC,CAAC;AACjE;AAKA,eAAsB,mBACpB,WACmC;AACnC,MAAM,WAAW,sBAAsB,SAAS;AAEhD,MAAI,CAAE,MAAM,OAAO,QA
AQ;AACzB,WAAO;AAGT,MAAI;AACF,QAAM,UAAU,MAAM,aAAa,QAAQ;AAC3C,WAAO,KAAK,MAAM,OAAO;AAAA,EAC3B,SAAS,OAAO;AACd,kBAAO,KAAK,qCAAqC,QAAQ,IAAI,EAAE,MAAM,CAAC,GAC/D;AAAA,EACT;AACF;AAKA,eAAsB,gBAAgB,WAAkC;AACtE,MAAM,WAAW,MAAM,mBAAmB,SAAS;AAEnD,EAAI,aACF,SAAS,gBAAe,oBAAI,KAAK,GAAE,YAAY,GAC/C,MAAM,oBAAoB,WAAW,QAAQ;AAEjD;AAKA,eAAsB,eAAe,KAAa,WAAqC;AACrF,MAAM,YAAY,qBAAqB,KAAK,SAAS;AACrD,SAAO,MAAM,OAAO,SAAS;AAC/B;;;AD5LA,IAAM,kBAAkB,MAAU;AA4BlC,SAAS,qBAA6B;AACpC,SAAOC,MAAKC,SAAQ,GAAG,gBAAgB,SAAS,UAAU;AAC5D;AAEA,SAAS,sBAA8B;AACrC,SAAOD,MAAKC,SAAQ,GAAG,gBAAgB,SAAS,UAAU;AAC5D;AAEA,SAAS,mBAAmB,KAAqB;AAC/C,MAAM,UAAU,kBAAkB,GAAG;AACrC,SAAOD,MAAK,mBAAmB,GAAG,GAAG,OAAO,OAAO;AACrD;AAEA,SAAS,qBAAqB,MAAsB;AAClD,MAAM,WAAW,KAAK,QAAQ,OAAO,IAAI;AACzC,SAAOA,MAAK,oBAAoB,GAAG,GAAG,QAAQ,OAAO;AACvD;AAEA,SAAS,UAAU,WAAmB,OAAwB;AAC5D,MAAM,cAAc,IAAI,KAAK,SAAS,EAAE,QAAQ;AAChD,SAAO,KAAK,IAAI,IAAI,cAAc;AACpC;AAEA,eAAe,aAAgB,UAAqC;AAClE,MAAI,CAAE,MAAM,OAAO,QAAQ;AACzB,WAAO;AAGT,MAAI;AACF,QAAM,UAAU,MAAM,aAAa,QAAQ;AAC3C,WAAO,KAAK,MAAM,OAAO;AAAA,EAC3B,SAAS,OAAO;AACd,kBAAO,KAAK,gCAAgC,QAAQ,IAAI,EAAE,MAAM,CAAC,GAC1D;AAAA,EACT;AACF;AAEA,eAAe,cAAiB,UAAkB,MAAwB;AACxE,MAAM,MAAMA,MAAK,UAAU,IAAI;AAC/B,QAAM,UAAU,GAAG,GACnB,MAAM,cAAc,UAAU,KAAK,UAAU,MAAM,MAAM,CAAC,CAAC;AAC7D;AAEO,SAAS,qBAAmC;AACjD,SAAO;AAAA,IACL,MAAM,sBAAsB,KAAa,KAAqC;AAC5E,UAAM,YAAY,mBAAmB,GAAG,GAClC,QAAQ,MAAM,aAA0B,SAAS;AAEvD,aAAK,OAAO,OAAO,GAAG,IAMf,MAAM,KAAK,GAAG,EAAE,SALd;AAAA,IAMX;AAAA,IAEA,MAAM,eAAe,KAAa,KAAa,QAA+B;AAC5E,UAAM,YAAY,mBAAmB,GAAG,GACpC,QAAQ,MAAM,aAA0B,SAAS;AAErD,MAAK,UACH,QAAQ,EAAE,MAAM,CAAC,EAAE,IAGrB,MAAM,KAAK,GAAG,IAAI;AAAA,QAChB;AAAA,QACA,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,MACpC,GAEA,MAAM,cAAc,WAAW,KAAK,GACpC,OAAO,MAAM,kBAAkB,GAAG,OAAO,OAAO,UAAU,GAAG,CAAC,CAAC,IAAI,EAAE,IAAI,CAAC;AAAA,IAC5E;AAAA,IAEA,MAAM,oBAAoB,MAAc,SAAyC;AAC/E,UAAM,cAAc,sBAAsB,MAAM,OAAO;AAEvD,aAAI,MAAM,OAAO,WAAW,IACnB,cAGF;AAAA,IACT;AAAA,IAEA,MAAM,gBAAgB,MAAc,SAAmC;AACrE,UAAM,cAAc,sBAAsB,MAAM,OAAO;AACvD,aAAO,MAAM,OAAO,WAAW;AAAA,IACjC;AAAA,IAEA,MAAM,kBAAkB,MAA8C;AACpE,U
AAM,YAAY,qBAAqB,IAAI,GACrC,QAAQ,MAAM,aAA6B,SAAS;AAE1D,aAAK,QAID,UAAU,MAAM,WAAW,eAAe,KAC5C,OAAO,MAAM,8BAA8B,IAAI,IAAI,EAAE,WAAW,MAAM,UAAU,CAAC,GAC1E,QAGF,QARE;AAAA,IASX;AAAA,IAEA,MAAM,cAAc,MAAc,UAAoB,MAA8B;AAClF,UAAM,YAAY,qBAAqB,IAAI,GAErC,QAAwB;AAAA,QAC5B;AAAA,QACA,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,QAClC,GAAI,QAAQ,EAAE,KAAK;AAAA,MACrB;AAEA,YAAM,cAAc,WAAW,KAAK,GACpC,OAAO,MAAM,uBAAuB,IAAI,IAAI,EAAE,cAAc,SAAS,OAAO,CAAC;AAAA,IAC/E;AAAA,EACF;AACF;;;ALvIA,IAAM,wBAAwB;AAE9B,SAAS,sBAAsB,OAAyB;AACtD,SAAO,OAAO,SAAU,YAAY,sBAAsB,KAAK,KAAK;AACtE;AAEA,SAAS,iBAAiB,OAAuB;AAC/C,MAAI,sBAAsB,MAAM,OAAO;AACrC,WAAO;AAGT,MAAM,QAAS,MAAc;AAK7B,SAJI,aAAU,sBAAsB,MAAM,OAAO,KAAK,sBAAsB,MAAM,IAAI,KAAK,sBAAsB,MAAM,KAAK,MAIxH,sBAAuB,MAAc,IAAI,KAAK,sBAAuB,MAAc,KAAK;AAK9F;AA+EA,SAAS,uBAAuB,OAA4B;AAC1D,MAAI,CAAC,SAAS,MAAM,WAAW;AAC7B,WAAO,CAAC;AAGV,MAAM,aAAa,MAChB,OAAO,CAAAE,UAAQ,OAAOA,SAAS,QAAQ,EACvC,IAAI,CAAAA,UAAQA,MAAK,WAAW,GAAG,IAAIA,MAAK,MAAM,CAAC,IAAIA,KAAI,EACvD,IAAI,CAAAA,UAAQ,sBAAsBA,KAAI,CAAC,EACvC,OAAO,CAAAA,UAAQA,MAAK,SAAS,CAAC;AAEjC,SAAO,MAAM,KAAK,IAAI,IAAI,UAAU,CAAC;AACvC;AAEO,SAAS,kBACd,MACA,SACA,SACQ;AACR,MAAM,cAAc,mBAAmB,IAAI,GAErC,WADa,WAAW,YAAY,WAEtC,0BAA0B,WAAW,MAAM,mBAAmB,OAAiB,CAAC,KAChF,0BAA0B,WAAW,IAEnC,SAAmB,CAAC;AAC1B,EAAI,SAAS,aACX,OAAO,KAAK,gBAAgB;AAG9B,MAAM,kBAAkB,uBAAuB,SAAS,KAAK;AAC7D,MAAI,gBAAgB,SAAS,GAAG;AAC9B,QAAM,eAAe,gBAAgB,IAAI,CAAAA,UAAQ,mBAAmBA,KAAI,CAAC,EAAE,KAAK,GAAG;AACnF,WAAO,KAAK,SAAS,YAAY,EAAE,GACnC,OAAO,KAAK,sBAAsB;AAAA,EACpC;AAEA,MAAI,OAAO,WAAW;AACpB,WAAO;AAGT,MAAM,YAAY,SAAS,SAAS,GAAG,IAAI,MAAM;AACjD,SAAO,GAAG,QAAQ,GAAG,SAAS,GAAG,OAAO,KAAK,GAAG,CAAC;AACnD;AAeO,SAAS,wBACd,cACiE;AAEjE,MAAI,UAAU;AACd,MAAI,aAAa,WAAW,KAAK;AAE/B,aAAS,IAAI,aAAa,SAAS,GAAG,KAAK,GAAG;AAC5C,UAAI,aAAa,CAAC,MAAM,OAAO,MAAM,GAAG;AACtC,kBAAU;AACV;AAAA,MACF;AAAA;AAGF,cAAU,aAAa,YAAY,GAAG;AAGxC,MAAI,WAAW,KAAK,YAAY,aAAa,SAAS;AACpD,UAAM,IAAI,MAAM,0BAA0B,YAAY,2CAA2C;AAGnG,MAAM,UAAU,aAAa,MAAM,GAAG,OAAO,GACvC,aAAa,aAAa,MAAM,UAAU,CAAC,GAG7C,aACA;AACJ,MAAI,QAAQ,WAAW,KAAK,GAAG;AAE7B,QAAM,WAAW,QAAQ,MA
AM,GAAG;AAClC,QAAI,SAAS,SAAS;AACpB,YAAM,IAAI,MAAM,4CAA4C,YAAY,IAAI;AAG9E,kBAAc,SAAS,MAAM,GAAG,CAAC,EAAE,KAAK,GAAG,GAEvC,SAAS,SAAS,MACpB,WAAW,SAAS,MAAM,CAAC,EAAE,KAAK,GAAG;AAAA,EAEzC,WAAW,QAAQ,WAAW,GAAG,GAAG;AAElC,QAAM,WAAW,QAAQ,MAAM,GAAG;AAClC,QAAI,SAAS,SAAS;AACpB,YAAM,IAAI,MAAM,4CAA4C,YAAY,IAAI;AAE9E,kBAAc,SAAS,MAAM,GAAG,CAAC,EAAE,KAAK,GAAG,GAC3C,WAAW,SAAS,SAAS,IAAI,SAAS,MAAM,CAAC,EAAE,KAAK,GAAG,IAAI;AAAA,EACjE,OAAO;AAEL,QAAM,WAAW,QAAQ,MAAM,GAAG;AAClC,kBAAc,SAAS,CAAC,GACxB,WAAW,SAAS,SAAS,IAAI,SAAS,MAAM,CAAC,EAAE,KAAK,GAAG,IAAI;AAAA,EACjE;AAGA,MAAM,kBAAkB,WAAW,MAAM,GAAG,GACtC,UAAU,gBAAgB,CAAC,GAC3B,cAAc,gBAAgB,SAAS,IAAI,gBAAgB,MAAM,CAAC,EAAE,KAAK,GAAG,IAAI;AAEtF,MAAI,CAAC,eAAe,CAAC;AACnB,UAAM,IAAI,MAAM,0BAA0B,YAAY,2CAA2C;AAGnG,MAAM,oBAAoB,CAAC,UAAU,WAAW,EAAE,OAAO,OAAO,GAC1D,eAAe,kBAAkB,SAAS,IAAI,kBAAkB,KAAK,GAAG,IAAI;AAElF,SAAO,EAAE,aAAa,SAAS,aAAa;AAC9C;AAkCO,SAAS,kBAAkB,UAAyC;AACzE,SAAO;AACT;AAmLA,eAAsB,2BACpB,MACA,SACA,UAA6B,CAAC,GACQ;AACtC,MAAI;AACF,UAAM,0BAA0B;AAEhC,QAAM,UAAU,MAAM,cAAc,OAAO,GACrC,WAAW,MAAM;AAAA,MACrB,QAAQ;AAAA,MACR;AAAA,MACA;AAAA,MACA,QAAQ;AAAA,MACR,QAAQ;AAAA,IACV;AAEA,WAAO;AAAA,MACL,SAAS;AAAA,MACT;AAAA,MACA;AAAA,IACF;AAAA,EACF,SAAS,OAAO;AACd,WAAO,kBAAkB,KAAK;AAAA,EAChC;AACF;AAEA,eAAsB,sBACpB,MACA,SACA,UAA6B,CAAC,GACH;AAI3B,MAAM,kBAAkB,QAAQ,SAC1B,MAAM,QAAQ,QAAQ,SAAY,QAAQ,QAC1C,eAAgB,kBAAmC,SAAjB,KAAK,QAAQ,GAC/C,UAAU,mBAAmB;AAEnC,MAAI;AAGF,QAAI,WAAW,YAAY,YAAY,CAAC,QAAQ,gBAAgB;AAE9D,UAAM,YAAY,MADG,mBAAmB,EACH,oBAAoB,MAAM,OAAO;AACtE,UAAI;AACF,sBAAO,MAAM,kEAAkE,EAAE,MAAM,SAAS,UAAU,CAAC,GACpG;AAAA,UACL,SAAS;AAAA,UACT;AAAA,UACA;AAAA,UACA,UAAU,CAAC;AAAA,UACX,WAAW,EAAE,OAAO,CAAC,GAAG,UAAU,GAAG;AAAA,UACrC,aAAa;AAAA,UACb,SAAS;AAAA,UACT,aAAa;AAAA,UACb,aAAa;AAAA,QACf;AAAA,IAEJ;AAEA,QAAM,eAAe,UAAU,IAAI,OAAO,KAAK;AAC/C,IAAI,kBACF,SAAS,QAAQ,YAAY,IAAI,GAAG,YAAY,gBAAgB,IAEhE,cAAc,MAAM,YAAY,IAAI,GAAG,YAAY,gBAAgB;AAGrE,QAAM,iBAAiB,QAAQ,qBAC3B,MAAM,2BAA2B,OAAO,IACxC,MAAM,2BAA2B,MAAM,SAAS,OAAO;AAE3D,QAAI,CAAC,eAAe;AAClB,2BAAc,KAAK,GACZ;AAGT,QAAM,EAAE,SAAS,SAAS,IAAI,
gBACxB,kBAAkB,uBAAuB,QAAQ;AACvD,QAAI,CAAC,iBAAiB;AACpB,2BAAc,KAAK,GACZ;AAAA,QACL,SAAS;AAAA,QACT,QAAQ;AAAA,QACR,SAAS;AAAA,MACX;AAGF,QAAM,kBAAkB,mBAAmB,SAAS,QAAQ,OAAO;AACnE,aAAS,QAAQ,eAAe,IAAI,IAAI,eAAe,EAAE;AAEzD,QAAM,YAAY,kBAAkB,eAAe,GAC7C,gBAAgB,MAAM,uBAAuB,QAAQ,YAAY,gBAAgB,WAAW,GAE5F,eAAe,YAAY,SAAY,SAAS,QAAQ;AAC9D,QAAI,CAAC,uBAAuB,eAAe,YAAY;AACrD,2BAAc,KAAK,GACZ;AAAA,QACL,SAAS;AAAA,QACT,QAAQ;AAAA,QACR,SAAS;AAAA,MACX;AAGF,aAAS,QAAQ,cAAc,IAAI,IAAI,eAAe,EAAE;AACxD,QAAM,YAAY,MAAM,0BAA0B,aAAa;AAE/D,iBAAM,2BAA2B,UAAU,WAAW;AAAA,MACpD,SAAS;AAAA,IACX,CAAC,GAEG,kBACF,SAAS,QAAQ,WAAW,IAAI,IAAI,eAAe,EAAE,IAErD,cAAc,KAAK,WAAW,IAAI,IAAI,eAAe,EAAE,GAGlD;AAAA,MACL,SAAS;AAAA,MACT,MAAM,SAAS,QAAQ;AAAA,MACvB,SAAS;AAAA,MACT;AAAA,MACA;AAAA,MACA,aAAa,QAAQ;AAAA,MACrB,SAAS,QAAQ;AAAA,MACjB,aAAa,gBAAgB;AAAA,MAC7B,aAAa,SAAS,QAAQ;AAAA,IAChC;AAAA,EACF,SAAS,OAAO;AACd,yBAAc,KAAK,GACZ,kBAAkB,KAAK;AAAA,EAChC;AACF;AAEA,SAAS,uBAAuB,UAAgE;AAC9F,MAAI,CAAC,MAAM,QAAQ,SAAS,SAAS,KAAK,SAAS,UAAU,WAAW;AACtE;AAGF,MAAM,eAAe,SAAS,UAAU,KAAK,cAAY,SAAS,SAAS,SAAS,QAAQ,QAAQ,SAAS,WAAW;AACxH,SAAI,cAAc,cACT,eAGa,SAAS,UAAU,KAAK,cAAY,SAAS,WAAW;AAEhF;AAEA,eAAe,2BAA2B,SAAkE;AAC1G,MAAI,CAAC,QAAQ;AACX,UAAM,IAAI,MAAM,yCAAyC;AAK3D,SAAO;AAAA,IACL,SAAS;AAAA,IACT,SAJc,MAAM,cAAc,OAAO;AAAA,IAKzC,UAAU,QAAQ;AAAA,EACpB;AACF;AAEA,eAAe,cAAc,SAAwD;AACnF,MAAM,cAAc;AAAA,IAClB,SAAS,QAAQ;AAAA,IACjB,QAAQ,QAAQ;AAAA,EAClB,GAEM,aAAa,QAAQ,cAAc,MAAM,iBAAiB,WAAW,GACrE,UAAU,YAAY,kBAAkB,WAAW,GACnD,cAAc,YAAY,eAAe;AAE/C,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;AAEA,eAAe,iBACb,YACA,MACA,SACA,WACA,OAC8B;AAC9B,MAAM,gBAAgB,kBAAkB,MAAM,SAAS,EAAE,WAAW,MAAM,CAAC;AAC3E,gBAAO,MAAM,oCAAoC;AAAA,IAC/C;AAAA,IACA,SAAS,WAAW;AAAA,IACpB,UAAU;AAAA,IACV,WAAW,CAAC,CAAC;AAAA,IACb,UAAU,CAAC,CAAC,SAAS,MAAM,SAAS;AAAA,EACtC,CAAC,GACM,MAAM,WAAW,IAAyB,aAAa;AAChE;AAEA,eAAe,uBAAuB,YAAwB,aAAsC;AAClG,MAAM,gBAAgB,MAAM;AAC1B,QAAI;AACF,aAAO,IAAI,IAAI,WAAW,EAAE;AAAA,IAC9B,QAAQ;AACN,aAAO;AAAA,IACT;AAAA,EACF,GAAG,GACG,gBAAgB,MAAM;AAC1B,QAAI;AACF,aAAO,IAAI,IAAI,YAAY,eAAe,CAAC,EAAE;
AAAA,IAC/C,QAAQ;AACN,aAAO;AAAA,IACT;AAAA,EACF,GAAG,GACG,iBAAiB,iBAAiB,MAAM,iBAAiB,MAAM,iBAAiB,cAChF,SAAS,MAAM,WAAW,aAAa,aAAa,EAAE,UAAU,eAAe,CAAC;AACtF,SAAO,OAAO,KAAK,MAAM;AAC3B;AAEA,eAAe,2BACb,UACA,WACA,cAAqC,CAAC,GACvB;AACf,MAAM,WAAiD;AAAA,IACrD,MAAM,SAAS,QAAQ;AAAA,IACvB,SAAS,SAAS,QAAQ;AAAA,IAC1B,aAAa,SAAS,QAAQ;AAAA,IAC9B,UAAU,SAAS,QAAQ;AAAA,IAC3B,SAAS,SAAS,QAAQ;AAAA,EAC5B;AAEA,EAAC,SAAiB,QAAQ,UAAU,MAAM,IAAI,UAAQ,KAAK,IAAI,GAC9D,SAAiB,UAAU,SAAS,QAAQ,WAC5C,SAAiB,UAAU,SAAS,QAAQ;AAE7C,MAAI,QAAQ,UAAU;AAEtB,MAAI,YAAY;AACd,QAAI;AACF,UAAM,WAAW,MAAM,eAAe,YAAY,SAAS,QAAQ,MAAM,SAAS,QAAQ,OAAO;AACjG,cAAQ,kBAAkB,SAAS,OAAO,KAAK;AAAA,IACjD,QAAQ;AAAA,IAER;AAGF,QAAM,eAAe;AAAA,IACnB,EAAE,UAAkC,MAAM;AAAA,IAC1C,EAAE,SAAS,EAAQ,YAAY,QAAS;AAAA,EAC1C;AACF;AAEA,SAAS,kBAAkB,OAAmC;AAG5D,MAFA,OAAO,MAAM,gCAAgC,EAAE,MAAM,CAAC,GAElD,iBAAiB;AACnB,WAAO;AAAA,MACL,SAAS;AAAA,MACT,QAAQ;AAAA,MACR,SAAS,MAAM;AAAA,MACf;AAAA,IACF;AAGF,MAAI,iBAAiB;AACnB,WAAO;AAAA,MACL,SAAS;AAAA,MACT,QAAQ;AAAA,MACR,SAAS,MAAM;AAAA,MACf;AAAA,IACF;AAGF,MAAI,iBAAiB,OAAO;AAC1B,QAAM,WAAY,MAAc;AAEhC,WAAI,UAAU,eAAe,MACQ;AAAA,MACjC,SAAS;AAAA,MACT,QAAQ;AAAA,MACR,SAAS,MAAM;AAAA,MACf,YAAY;AAAA,MACZ;AAAA,IACF,IAIE,UAAU,eAAe,OAAO,UAAU,eAAe,MACpD;AAAA,MACL,SAAS;AAAA,MACT,QAAQ;AAAA,MACR,SAAS,MAAM;AAAA,MACf,YAAY,SAAS;AAAA,MACrB;AAAA,IACF,IAGE,iBAAiB,KAAK,IACjB;AAAA,MACL,SAAS;AAAA,MACT,QAAQ;AAAA,MACR,SAAS,MAAM;AAAA,MACf;AAAA,IACF,IAGK;AAAA,MACL,SAAS;AAAA,MACT,QAAQ;AAAA,MACR,SAAS,MAAM;AAAA,MACf;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AAAA,IACL,SAAS;AAAA,IACT,QAAQ;AAAA,IACR,SAAS;AAAA,IACT;AAAA,EACF;AACF;;;AOpwBO,SAAS,yBAAyB,SAAyB;AAChE,MAAM,cAAc,WAAW,IAAI,KAAK;AAExC,SAAK,aAKD,WAAW,SAAS,8BAA8B,IAC7C,iCAEL,iBAAiB,KAAK,UAAU,IAC3B,kBAEL,iBAAiB,KAAK,UAAU,IAC3B,kBAEL,0BAA0B,KAAK,UAAU,IACpC,2BAIL,gBAAgB,KAAK,UAAU,KAG/B,4CAA4C,KAAK,UAAU,IACtD,kBAIL,iBAAiB,KAAK,UAAU,IAC3B,iCAIL,sCAAsC,KAAK,UAAU,IAChD,kBAIL,sBAAsB,KAAK,UAAU,IAChC,2BAIL,WAAW,UAAU,KAChB,aAGF,GAAG,WAAW,MAAM,GAAG,EAAE,CAAC,QA7CxB;AA8CX;;;ACOO,SAAS,sBAAsB,OAAe,SAAoC;AACvF,UAAQ,QAAQ,QAAQ;AAAA,IACtB,KAAK;
AACH,aAAO,YAAY,KAAK;AAAA,IAC1B,KAAK;AACH,aAAO,QAAQ,WAAW,yBAAyB,KAAK;AAAA,IAC1D,KAAK;AACH,aAAO,QAAQ,WAAW,yBAAyB,KAAK;AAAA,IAC1D,KAAK;AACH,aAAO,QAAQ,WAAW,kCAAkC,KAAK;AAAA,IACnE;AACE,aAAO,QAAQ,WAAW,kBAAkB,KAAK;AAAA,EACrD;AACF;;;AC3BA,eAAe,mBAAmB,MAAgC;AAChE,MAAM,aAAa,qBAAqB,IAAI;AAC5C,SAAO,MAAM,OAAO,eAAe,UAAU,CAAC;AAChD;AAEA,SAAS,yBAAyB,cAAsB,OAAuB;AAC7E,MAAM,kBAAkB,qBAAqB,MAAM,QAAQ,MAAM,EAAE,CAAC,GAC9D,iBAAiB,qBAAqB,YAAY;AACxD,SAAO,IAAI,eAAe,IAAI,cAAc;AAC9C;AAEA,eAAe,0BAA0B,MAA6B;AACpE,MAAI;AACF,wBAAoB,IAAI;AAAA,EAC1B,SAAS,OAAO;AACd,UAAM,IAAI,MAAO,MAAgB,QAAQ,QAAQ,MAAM,IAAI,CAAC;AAAA,EAC9D;AAEA,MAAI,CAAC,aAAa,IAAI;AACpB,UAAM,IAAI,MAAM,wCAAwC;AAG1D,MAAI,MAAM,mBAAmB,IAAI;AAC/B,UAAM,IAAI;AAAA,MACR,YAAY,IAAI;AAAA,IAClB;AAEJ;AAKA,eAAsB,0BAA0B,aAAmD;AACjG,SAAK,eAIU,MAAM,cAAc,OAAO,GACb,WAAW,WAAW,GAC7B,UAAU,QAL9B;AAMJ;AA0EA,eAAsB,sCACpB,cACA,UACA,aACA,QACiB;AACjB,MAAI,aAAa,YAAY;AAC3B,UAAM,IAAI,MAAM,qCAAqC,YAAY,GAAG;AAGtE,MAAI,CAAC,UAAU,KAAK;AAClB,UAAM,IAAI,MAAM,8CAA8C;AAGhE,MAAM,MAAM,UAAU,cAAc,GAC9B,iBAAiB,qBAAqB,YAAY,GAElD,SAAS,MAAM,IAAI;AAAA,IACvB,YAAY,cAAc;AAAA,IAC1B;AAAA,MACE;AAAA,QACE,OAAO,sBAAsB,QAAQ;AAAA,QACrC,OAAO;AAAA,QACP,aAAa,eAAe,QAAQ,IAAI,cAAc;AAAA,MACxD;AAAA,MACA;AAAA,QACE,OAAO;AAAA,QACP,OAAO;AAAA,QACP,aAAa,4BAA4B,cAAc;AAAA,MACzD;AAAA,IACF;AAAA,IACA;AAAA,EACF;AAEA,MAAI,CAAC;AACH,UAAM,IAAI,sBAAsB,6BAA6B;AAG/D,MAAI,QAAQ;AACZ,MAAI,WAAW,UAAU;AAEvB,QAAM,gBADe,MAAM,0BAA0B,WAAW,IAC7B,QAAQ,MAAM,EAAE,KAAK,UAElD,eAAe,MAAM,IAAI;AAAA,MAC7B,kCAAkC,cAAc;AAAA,MAChD;AAAA,QACE,SAAS;AAAA,QACT,UAAU,OAAO,UAAkB;AACjC,cAAI,CAAC,MAAO,QAAO;AAEnB,cAAM,YAAY,yBAAyB,gBAAgB,KAAK;AAChE,cAAI;AACF,yBAAM,0BAA0B,SAAS,GAClC;AAAA,UACT,SAAS,OAAO;AACd,mBAAQ,MAAgB;AAAA,UAC1B;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAEA,QAAI,CAAC;AACH,YAAM,IAAI,sBAAsB,6BAA6B;AAG/D,YAAQ;AAAA,EACV;AAEA,MAAM,aAAa,yBAAyB,gBAAgB,KAAK;AACjE,eAAM,0BAA0B,UAAU,GACnC;AACT;;;AVjJA,eAAsB,+BAA+B,MAA+D;AAClH,MAAM,kBAAkB;AAAA,IACtB,KAAK,iBAAiB,MAAM,oBAAoB,KAAK,WAAW;AAAA,EAClE,GACI,iBAA2B,CAAC,GAC5B,eAAqD,WACrD,aACA,eACE,WAAqB,CAAC;AAE5B,MAAI,K
AAK,SAAS;AAChB,QAAI,KAAK;AACP,uBAAiB,yBAAyB,KAAK,cAAc,GAC7D,eAAe;AAAA,SACV;AACL,UAAM,eAAe,MAAM,oBAAoB,KAAK,aAAa;AAAA,QAC/D,SAAS,KAAK;AAAA,QACd,QAAQ,KAAK;AAAA,QACb,WAAW,KAAK,SAAS;AAAA,MAC3B,CAAC;AAED,MAAI,aAAa,WACf,iBAAiB,yBAAyB,aAAa,QAAQ,GAC/D,eAAe,cAEf,eAAe,UACf,cAAc,sBAAsB,KAAK,aAAa,aAAa,OAAO,GAC1E,gBAAgB,aAAa;AAAA,IAEjC;AAGF,MAAI,KAAK,SAAS;AAChB,WAAO;AAAA,MACL,eAAe;AAAA,MACf,gBAAgB,CAAC;AAAA,MACjB,mBAAmB;AAAA,MACnB,cAAc;AAAA,MACd;AAAA,IACF;AAGF,MAAI,KAAK,SAAS,kBAAkB;AAClC,QAAI,iBAAiB;AACnB,YAAM,IAAI;AAAA,QACR,eAAe,4CAA4C,KAAK,WAAW;AAAA,MAC7E;AAGF,WAAO;AAAA,MACL,eAAe;AAAA,MACf;AAAA,MACA,mBAAmB;AAAA,MACnB;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAEA,MAAM,sBAAsB,iBAAiB;AAE7C,MAAI,uBAAuB,eAAe,aAAa,KAAK,WAAW,GAAG;AACxE,QAAM,SAAS,yBAAyB,WAAW;AACnD,aAAS,KAAK,4BAA4B,KAAK,WAAW,eAAe,MAAM,GAAG;AAAA,EACpF;AAEA,SAAO;AAAA,IACL,eAAe;AAAA,IACf;AAAA,IACA,mBAAmB,sBAAsB,kBAAkB,qBAAqB,iBAAiB,cAAc;AAAA,IAC/G;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;AAEA,eAAsB,4BACpB,MAC+C;AAC/C,MAAM,yBAAkD;AAAA,IACtD,GAAI,KAAK,oBAAoB,CAAC;AAAA,IAC9B,GAAI,KAAK,2BAA2B,EAAE,0BAA0B,GAAK,IAAI,CAAC;AAAA,EAC5E,GAEMC,eAAc,CAAC,aACnB,KAAK,0BAA0B,KAAK,wBAAwB,QAAQ,IAAI,UAEpE,qBAAqB,CAAC,SAA+B,gBAAuC;AAChG,QAAM,mBAAmBA,aAAY,QAAQ,iBAAiB,GAExD,YAAY;AAAA,MAChB;AAAA,MACA,KAAK;AAAA,MACL;AAAA,IACF,GAEM,kBAAkB,UAAU,SAC9B;AACJ,QAAI,iBAAiB;AACnB,UAAM,UAAU,QAAQ,cAAc,SAAS,eAAe,GACxD,WAAW,QAAQ,eAAe,SAAS,eAAe;AAChE,MAAI,WAAW,CAAC,WACd,mBAAmB,UACV,CAAC,WAAW,WACrB,mBAAmB,WACV,WAAW,aACpB,mBAAmB,gBAAgB,mBAAmB,WAAW;AAAA,IAErE;AAEA,WAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF,GAEM,aAAa;AAAA,IACjB,aAAa,KAAK;AAAA,IAClB,eAAe,KAAK;AAAA,IACpB,gBAAgB,KAAK;AAAA,IACrB,SAAS,KAAK;AAAA,IACd,QAAQ,KAAK;AAAA,EACf;AAEA,MAAI,KAAK,SAAS,cAAc;AAC9B,QAAM,UAAU,MAAM,+BAA+B;AAAA,MACnD,GAAG;AAAA,MACH,MAAM;AAAA,IACR,CAAC;AAED,WAAO;AAAA,MACL,GAFa,mBAAmB,SAAS,YAAY;AAAA,MAGrD,YAAY,KAAK;AAAA,MACjB,MAAM,KAAK;AAAA,IACb;AAAA,EACF;AAEA,MAAI,KAAK,SAAS,kBAAkB;AAClC,QAAM,UAAU,MAAM,+BAA+B;AAAA,MACnD,GAAG;AAAA,MACH,MAAM;AAAA,IACR,CAAC
;AAED,WAAO;AAAA,MACL,GAFa,mBAAmB,SAAS,gBAAgB;AAAA,MAGzD,YAAY,KAAK;AAAA,MACjB,MAAM,KAAK;AAAA,IACb;AAAA,EACF;AAGA,MAAM,eAAe,MAAM,+BAA+B;AAAA,IACxD,GAAG;AAAA,IACH,MAAM;AAAA,EACR,CAAC,GACK,eAAe,mBAAmB,cAAc,YAAY;AAElE,MAAI,aAAa;AACf,WAAO;AAAA,MACL,GAAG;AAAA,MACH,YAAY,KAAK;AAAA,MACjB,MAAM,KAAK;AAAA,IACb;AAGF,MAAM,kBAAkB,MAAM,+BAA+B;AAAA,IAC3D,GAAG;AAAA,IACH,MAAM;AAAA,EACR,CAAC;AAED,MAAI,gBAAgB,iBAAiB,UAAU;AAC7C,QAAM,SACJ,gBAAgB,eAChB,iDAAiD,KAAK,WAAW;AACnE,UAAM,IAAI,MAAM,MAAM;AAAA,EACxB;AAIA,SAAO;AAAA,IACL,GAHsB,mBAAmB,iBAAiB,KAAK,IAAI;AAAA,IAInE,YAAY,KAAK;AAAA,IACjB,MAAM,KAAK;AAAA,EACb;AACF;AAEA,eAAe,oBACb,aACA,SACA,QACoC;AACpC,MAAM,MAAM,UAAU,cAAc,GAC9BC,gBAAe,mBAAmB;AAGxC,MAAI,CAAC,QAAQ,WAAW;AACtB,QAAM,aAAa,MAAMA,cAAa,kBAAkB,WAAW;AACnE,QAAI,cAAc,WAAW,SAAS,SAAS;AAC7C,aAAO,EAAE,SAAS,IAAM,UAAU,WAAW,SAAS;AAAA,EAE1D;AAEA,MAAM,UAAU,IAAI,QAAQ;AAC5B,UAAQ,MAAM,gCAAgC,WAAW,KAAK;AAE9D,MAAI;AACF,QAAM,iBAAiB,MAAM,2BAA2B,aAAa,QAAW;AAAA,MAC9E,SAAS,QAAQ;AAAA,MACjB,QAAQ,QAAQ;AAAA,MAChB,WAAW;AAAA,IACb,CAAC;AAED,QAAI,CAAC,eAAe;AAClB,aAAO,EAAE,SAAS,IAAO,SAAS,eAAe;AAGnD,QAAM,WAAW,kCAAkC,eAAe,QAAQ;AAG1E,WAAI,SAAS,SAAS,KACpB,MAAMA,cAAa,cAAc,aAAa,QAAQ,GAGjD,EAAE,SAAS,IAAM,SAAS;AAAA,EACnC,UAAE;AACA,YAAQ,KAAK;AAAA,EACf;AACF;AAEA,SAAS,kCAAkC,UAAyC;AAClF,MAAM,YAAY,oBAAI,IAAY,GAE5B,aAA6B,CAAC,GAC9B,aAAa,SAAS;AAC5B,EAAI,MAAM,QAAQ,YAAY,QAAQ,KACpC,WAAW,KAAK,GAAG,WAAW,QAAQ;AAGxC,MAAM,cAAc;AACpB,EAAI,MAAM,QAAQ,aAAa,QAAQ,KACrC,WAAW,KAAK,GAAG,YAAY,QAAQ,GAErC,MAAM,QAAQ,aAAa,iBAAiB,KAC9C,WAAW,KAAK,GAAG,YAAY,iBAAiB;AAGlD,WAAW,aAAa,YAAY;AAClC,QAAM,aAAa,qBAAqB,SAAS;AACjD,IAAI,cACF,UAAU,IAAI,UAAU;AAAA,EAE5B;AAEA,MAAI,SAAS,SAAS;AACpB,QAAM,oBAAoB,SAAS,QAAQ,WAAW;AACtD,cAAU,IAAI,iBAAiB;AAAA,EACjC;AAEA,SAAO,MAAM,KAAK,SAAS;AAC7B;AAEA,SAAS,qBAAqB,WAAmC;AAC/D,MAAI,OAAO,aAAc;AACvB,WAAc,cAAM,SAAS,IAAI,YAAY;AAG/C,MAAI,aAAa,OAAO,aAAc,UAAU;AAC9C,QAAM,QAAS,UAAkB;AACjC,QAA2B,SAAU;AACnC,aAAO;AAET,QAAI,OAAO,SAAU,YACR,cAAM,KAAK;AACpB,aAAO;AAAA,EAGb;AAEA,SAAO;AACT;AAEA,SAAS,yBAAyB,UAA8B;AAC9D,MAAM,aAAa,oBAAI,IAAY;AACnC,WAAW,WAAW,
UAAU;AAC9B,QAAI,OAAO,WAAY;AACrB;AAEF,QAAM,UAAU,QAAQ,KAAK;AAC7B,IAAK,WAGO,cAAM,OAAO,KAGzB,WAAW,IAAI,OAAO;AAAA,EACxB;AAEA,SADe,MAAM,KAAK,UAAU,EAAE,KAAY,gBAAQ;AAE5D;AAEA,SAAS,qBAAqB,MAAgB,OAA2B;AACvE,MAAM,SAAS,oBAAI,IAAY;AAE/B,WAAW,WAAW,CAAC,GAAG,MAAM,GAAG,KAAK;AACtC,IAAW,cAAM,OAAO,KACtB,OAAO,IAAI,OAAO;AAKtB,SADe,MAAM,KAAK,MAAM,EAAE,KAAY,gBAAQ;AAExD;;;AD7WA,eAAsB,uBACpB,aACA,UAAyC,CAAC,GACH;AACvC,MAAM,iBAAiB,qBAAqB,WAAW,GACjD,OAA8B,QAAQ,QAAQ,WAC9C,aAAa,QAAQ,cAAc,4BAGrC;AACJ,EAAI,QAAQ,OAAO,SAAS,qBAM1B,iBALmB,MAAM,mCAAmC;AAAA,IAC1D,KAAK,QAAQ;AAAA,IACb,aAAa;AAAA,IACb;AAAA,EACF,CAAC,GAC0B;AAG7B,MAAM,YAAY,MAAM,4BAA4B;AAAA,IAClD,aAAa;AAAA,IACb;AAAA,IACA;AAAA,IACA;AAAA,IACA,0BAA0B,QAAQ;AAAA,IAClC,SAAS,QAAQ;AAAA,IACjB,QAAQ,QAAQ;AAAA,EAClB,CAAC;AAED,MAAI,CAAC,UAAU;AACb,UAAM,IAAI;AAAA,MACR,oCAAoC,cAAc,sBAAsB,UAAU;AAAA,IACpF;AAGF,MAAM,UAAU,UAAU,iBACpB,EAAE,UAAU,aAAa,IAAI,uBAAuB,GAEpD,eAAeC,MAAK,KAAK,cAAc,gBAAgB,SAASA,MAAK,GAAG,GACxE,eAAe,GAAG,uBAAuB,IAAI,GAAG,cAAc,IAAI,OAAO;AAE/E,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA,kBAAkB,UAAU;AAAA,EAC9B;AACF;;;AYzEA,SAAS,QAAAC,aAAY;AAKd,IAAM,uBAAN,MAA0D;AAAA,EAC/D,UAAU,QAAgC;AACxC,WAAO,OAAO,SAAS;AAAA,EACzB;AAAA,EAEA,MAAM,KACJ,QACA,SACA,aACA,SACwB;AACxB,QAAI,CAAC,OAAO;AACV,YAAM,IAAI,gBAAgB,QAAQ,+CAA+C;AAInF,QAAI,CAAC,OAAO,SAAS;AACnB,UAAM,WAAW,MAAM,uBAAuB,OAAO,aAAa;AAAA,QAChE,MAAM,QAAQ,kBAAkB;AAAA,QAChC,KAAK,YAAY;AAAA,QACjB,SAAS,QAAQ;AAAA,QACjB,QAAQ,QAAQ;AAAA,MAClB,CAAC;AACD,aAAO,UAAU,SAAS;AAAA,IAC5B;AAGA,QAAM,OAAO,QAAQ,kBAAkB,WACjC,cAAc,MAAM,6BAA6B,YAAY,WAAW,OAAO,WAAW,GAC1F,WAAW,MAAM,0BAA0B,OAAO,WAAW,GAC7D,aAAa,MAAM,kBAAkB,OAAO,aAAa,OAAO,OAAO,GACvE,mBAAmB,CAAC,EAAE,eAAe,YAAY,aAEjD,gBAAgB,eAAe;AAQrC,QAPI,kBACF,OAAO,wBAAwB;AAAA,MAC7B,MAAM,cAAc;AAAA,MACpB,gBAAgB,cAAc;AAAA,IAChC,IAGE,CAAC,oBAAoB,SAAS,cAAc;AAC9C,UAAM,aAAa,MAAM,sBAAsB,OAAO,aAAa,OAAO,SAAS;AAAA,QACjF,SAAS,QAAQ;AAAA,QACjB,QAAQ,QAAQ;AAAA,QAChB,gBAAgB,SAAS;AAAA,QACzB;AAAA,MACF,CAAC;AACD,UAAI,CAAC,WAAW,SAAS;AACvB,YAAM,SAAS,WAAW,UAAU,WAC9B,UAAU,WAAW,WAAW;AACtC,cAAM,IAAI;AAAA,UACR;AAA
A,UACA,WAAW,OAAO,WAAW,IAAI,OAAO,OAAO,kDAAkD,OAAO,aAAa,MAAM;AAAA,QAC7H;AAAA,MACF;AAAA,IACF,WAAW,oBAAoB,SAAS,kBAAkB;AACxD,UAAM,aAAa,MAAM,sBAAsB,OAAO,aAAa,OAAO,SAAS;AAAA,QACjF,SAAS,QAAQ;AAAA,QACjB,QAAQ,QAAQ;AAAA,QAChB,gBAAgB;AAAA,QAChB;AAAA,MACF,CAAC;AACD,UAAI,CAAC,WAAW,SAAS;AACvB,YAAM,SAAS,WAAW,UAAU,WAC9B,UAAU,WAAW,WAAW;AACtC,cAAM,IAAI;AAAA,UACR;AAAA,UACA,qBAAqB,OAAO,WAAW,IAAI,OAAO,OAAO,wBAAwB,OAAO,aAAa,MAAM;AAAA,QAC7G;AAAA,MACF;AAAA,IACF;AACA,QAAI,CAAC,oBAAoB,SAAS;AAChC,YAAM,IAAI;AAAA,QACR;AAAA,QACA,WAAW,OAAO,WAAW,IAAI,OAAO,OAAO;AAAA,MACjD;AAGF,QAAI;AAEF,UAAM,cAAc,MAAM,0BAA0B;AAAA,QAClD,KAAK,YAAY;AAAA,QACjB,aAAa,OAAO;AAAA,QACpB,SAAS,OAAO;AAAA,MAClB,CAAC,GAGK,eAAeA,MAAK,aAAa,iBAAiB;AAGxD,aAAO;AAAA,QACL,UAHe,MAAM,gBAAgB,YAAY;AAAA,QAIjD,aAAa,OAAO;AAAA,QACpB,SAAS,OAAO;AAAA,QAChB;AAAA,QACA,QAAQ;AAAA,MACV;AAAA,IACF,SAAS,OAAO;AACd,YAAM,IAAI;AAAA,QACR;AAAA,QACA,0BAA0B,OAAO,WAAW,IAAI,OAAO,OAAO;AAAA,QAC9D;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEF;;;AC9HA,SAAS,WAAAC,gBAAyB;;;ACclC,SAAS,QAAAC,QAAM,WAAAC,UAAS,WAAAC,UAAS,YAAAC,WAAU,cAAAC,aAAY,WAAW;;;ACPlE,SAAS,iBAAiB;AA0BnB,SAAS,uBAAuB,iBAAgC;AACrE,MAAM,WAAW,oBAAI,IAAY;AAGjC,MAAI,gBAAgB,QAAQ;AAC1B,aAAW,QAAQ,gBAAgB,OAAO;AACxC,sBAAgB,KAAK,MAAM,QAAQ;AAKvC,WAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,eAAe,GAAG;AAC1D,QAAI,QAAQ,YAAY,QAAQ,UAAW;AAE3C,QAAM,cAAc;AACpB,QAAI,YAAY;AACd,eAAW,QAAQ,YAAY;AAC7B,wBAAgB,KAAK,MAAM,QAAQ;AAAA,EAGzC;AAEA,SAAO,MAAM,KAAK,QAAQ;AAC5B;AAMA,SAAS,gBAAgB,MAAW,UAA6B;AAC/D,MAAI,OAAO,QAAS;AAClB,aAAS,IAAI,IAAI;AAAA,WACR,OAAO,QAAS,YAAY,SAAS,QAAQ,aAAa,QAAQ,OAAO,KAAK,WAAY;AAEnG,aAAS,IAAI,KAAK,OAAO;AAAA,WAChB,MAAM,QAAQ,IAAI;AAC3B,aAAW,KAAK;AACd,MAAI,OAAO,KAAM,WACf,SAAS,IAAI,CAAC,IACL,OAAO,KAAM,YAAY,MAAM,QAAQ,aAAa,KAAK,OAAQ,EAAU,WAAY,YAChG,SAAS,IAAK,EAAU,OAAO;AAAA,WAG1B,OAAO,QAAS,YAAY,KAAK,SAAS;AAEnD,QAAI,KAAK,QAAQ;AACf,eAAW,KAAK,KAAK,QAAQ;AAC3B,YAAI,OAAO,EAAE,SAAU;AACrB,mBAAS,IAAI,EAAE,KAAK;AAAA,iBACX,MAAM,QAAQ,EAAE,KAAK;AAC9B,mBAAW,KAAK,EAAE;AAChB,YAAI,OAAO,KAAM,WACf,SAAS,IAAI,CAAC,IACL,OAAO,KAAM,YAAY,MAAM,QAAQ,aAAa,KAAK,OAAQ,EAAU,WAAY
,YAChG,SAAS,IAAK,EAAU,OAAO;AAAA,YAG9B,CAAI,OAAO,EAAE,SAAU,YAAY,EAAE,UAAU,QAAQ,aAAa,EAAE,SAAS,OAAQ,EAAE,MAAc,WAAY,YACxH,SAAS,IAAK,EAAE,MAAc,OAAO;AAI3C,IAAI,KAAK,QAAQ,YACX,OAAO,KAAK,QAAQ,WAAY,WAClC,SAAS,IAAI,KAAK,QAAQ,OAAO,IACxB,OAAO,KAAK,QAAQ,WAAY,YAAY,KAAK,QAAQ,YAAY,QAAQ,aAAa,KAAK,QAAQ,WAAW,OAAQ,KAAK,QAAQ,QAAgB,WAAY,YAC5K,SAAS,IAAK,KAAK,QAAQ,QAAgB,OAAO;AAAA,EAGxD;AACF;AAUO,SAAS,cAAc,cAAsB,UAAoC;AACtF,MAAM,UAA0B,CAAC,GAG3B,iBAAiB,aAAa,QAAQ,cAAc,EAAE,GACtD,WAAW,eAAe,MAAM,GAAG,EAAE,OAAO,OAAK,EAAE,SAAS,CAAC;AAEnE,MAAI,SAAS,WAAW;AACtB,WAAO;AAIT,WAAW,WAAW,UAAU;AAE9B,QAAM,oBAAoB,QAAQ,QAAQ,cAAc,EAAE;AAG1D,QAFwB,kBAAkB,MAAM,GAAG,EAAE,OAAO,OAAK,EAAE,SAAS,CAAC,EAEzD,WAAW;AAG/B,eAAS,aAAa,GAAG,aAAa,SAAS,QAAQ,cAAc;AACnE,YAAM,gBAAgB,SAAS,MAAM,UAAU,EAAE,KAAK,GAAG;AAGzD,YAAI,UAAU,eAAe,mBAAmB,EAAE,KAAK,GAAK,CAAC,GAAG;AAC9D,cAAM,WAAW,aAAa,IAAI,SAAS,MAAM,GAAG,UAAU,EAAE,KAAK,GAAG,IAAI;AAE5E,kBAAQ,KAAK;AAAA,YACX,SAAS;AAAA,YACT;AAAA,YACA,aAAa;AAAA,YACb;AAAA,UACF,CAAC;AAGD;AAAA,QACF;AAAA,MACF;AAAA,EACF;AAEA,gBAAO,MAAM,4BAA4B;AAAA,IACvC,cAAc;AAAA,IACd,YAAY,QAAQ;AAAA,IACpB,SAAS,QAAQ,IAAI,QAAM;AAAA,MACzB,SAAS,EAAE;AAAA,MACX,YAAY,EAAE;AAAA,MACd,UAAU,EAAE;AAAA,IACd,EAAE;AAAA,EACJ,CAAC,GAEM;AACT;AAcO,SAAS,mBAAmB,SAIjC;AACA,MAAI,QAAQ,WAAW;AACrB,UAAM,IAAI,MAAM,8CAA8C;AAGhE,MAAI,QAAQ,WAAW;AACrB,WAAO;AAAA,MACL,OAAO,QAAQ,CAAC;AAAA,MAChB,aAAa;AAAA,IACf;AAIF,MAAM,gBAAgB,KAAK,IAAI,GAAG,QAAQ,IAAI,OAAK,EAAE,UAAU,CAAC,GAG1D,iBAAiB,QAAQ,OAAO,OAAK,EAAE,eAAe,aAAa;AAEzE,SAAI,eAAe,WAAW,IACrB;AAAA,IACL,OAAO,eAAe,CAAC;AAAA,IACvB,aAAa;AAAA,EACf,IAIK;AAAA,IACL,OAAO,eAAe,CAAC;AAAA;AAAA,IACvB,aAAa;AAAA,IACb,kBAAkB;AAAA,EACpB;AACF;AAUO,SAAS,iBACd,cACA,UAKO;AACP,MAAM,UAAU,cAAc,cAAc,QAAQ;AAEpD,SAAI,QAAQ,WAAW,IACd,OAGF,mBAAmB,OAAO;AACnC;;;AC7KO,SAAS,sBACd,QACA,YACwB;AACxB,MAAI,CAAC;AACH,UAAM,IAAI;AAAA,MACR,WAAW,UAAU;AAAA,IACvB;AAIF,MAAI,OAAO,UAAW;AACpB,WAAO,4BAA4B,QAAQ,UAAU;AAIvD,MAAM,YAAY;AAElB,MAAI,CAAC,UAAU;AACb,UAAM,IAAI;AAAA,MACR,WAAW,UAAU;AAAA,IACvB;AAGF,UAAQ,UAAU,QAAQ;AAAA,IACxB,KAAK;AACH,aAAO,sBAAsB,WAA2B,UAAU;AA
AA,IAEpE,KAAK;AACH,aAAO,sBAAsB,WAA2B,UAAU;AAAA,IAEpE;AACE,YAAM,IAAI;AAAA,QACR,WAAW,UAAU,mCAAoC,UAAkB,MAAM;AAAA,MAEnF;AAAA,EACJ;AACF;AAKA,SAAS,4BACPC,OACA,YACwB;AAExB,MAAIA,MAAK,SAAS,IAAI;AACpB,UAAM,IAAI;AAAA,MACR,WAAW,UAAU;AAAA,IACvB;AAIF,MAAIA,MAAK,WAAW,GAAG;AACrB,UAAM,IAAI;AAAA,MACR,WAAW,UAAU;AAAA,IACvB;AAMF,SAAO;AAAA,IACL,MAAM;AAAA,IACN,cAJqBA,MAAK,WAAW,IAAI,IAAIA,MAAK,UAAU,CAAC,IAAIA;AAAA,IAKjE,WAAWA;AAAA,EACb;AACF;AAKA,SAAS,sBACP,QACA,YACwB;AAExB,MAAI,CAAC,OAAO;AACV,UAAM,IAAI;AAAA,MACR,WAAW,UAAU;AAAA,IACvB;AAGF,MAAI,CAAC,OAAO,KAAK,SAAS,GAAG;AAC3B,UAAM,IAAI;AAAA,MACR,WAAW,UAAU,gEAAgE,OAAO,IAAI;AAAA,IAClG;AAGF,MAAM,QAAQ,OAAO,KAAK,MAAM,GAAG;AACnC,MAAI,MAAM,WAAW,KAAK,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,CAAC;AAC7C,UAAM,IAAI;AAAA,MACR,WAAW,UAAU,gEAAgE,OAAO,IAAI;AAAA,IAClG;AAMF,SAAO;AAAA,IACL,MAAM;AAAA,IACN,QAJa,sBAAsB,OAAO,IAAI;AAAA,IAK9C,QAAQ,OAAO;AAAA,IACf,SAAS,OAAO;AAAA,IAChB,WAAW;AAAA,EACb;AACF;AAKA,SAAS,sBACP,QACA,YACwB;AAExB,MAAI,CAAC,OAAO;AACV,UAAM,IAAI;AAAA,MACR,WAAW,UAAU;AAAA,IACvB;AAIF,MAAI;AACF,gBAAY,OAAO,GAAG;AAAA,EACxB,SAAS,OAAO;AACd,UAAM,IAAI;AAAA,MACR,WAAW,UAAU,0BAA0B,OAAO,GAAG,YAC/C,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC;AAAA,IAClE;AAAA,EACF;AAEA,SAAO;AAAA,IACL,MAAM;AAAA,IACN,QAAQ,OAAO;AAAA,IACf,QAAQ,OAAO;AAAA,IACf,SAAS,OAAO;AAAA,IAChB,WAAW;AAAA,EACb;AACF;AAKO,SAAS,qBAAqB,QAAyC;AAC5E,SAAO,OAAO,SAAS;AACzB;AAKO,SAAS,YAAY,QAAyC;AACnE,SAAO,OAAO,SAAS;AACzB;;;AFvMA,SAAS,QAAAC,aAAY;AAyCrB,eAAsB,WACpB,cACA,UACA,iBAC8B;AAE9B,MAAM,uBAAuBC,YAAW,YAAY,IAChD,eACAC,SAAQ,UAAU,YAAY,GAC5B,mBAAmBA,SAAQ,QAAQ,GAIrC,aAAa,sBACb,YAA4B,MAC5B,aAA6B;AACjC,MAAI;AACF,QAAM,IAAI,MAAMF,MAAK,oBAAoB;AACzC,gBAAY,EAAE,YAAY,GAC1B,aAAa,EAAE,OAAO,GACjB,EAAE,YAAY,MACjB,aAAaG,SAAQ,oBAAoB;AAAA,EAE7C,QAAQ;AAAA,EAER;AAEA,MAAM,eAAe,CAAC,YAChB,YAAY,mBAAyB,KAClC,QAAQ,WAAW,GAAG,gBAAgB,GAAG,GAAG,EAAE,GAKnD,kBAAiE,MAKjE,aAAa,YACb,cAAc;AAClB,SAAO,eAAe,eAAe,aAAa,UAAU,KAAG;AAE7D,QAAM,qBAAqBC,OAAK,YAAY,cAAc,eAAe;AACzE,QAAI,MAAM,OAAO,kBAAkB;AACjC,oBAAO,KAAK,qCAAqC,EAAE,MAAM,WAAW,CAAC,GAC9D;AAAA,QACL,MAAM;AAAA,QAC
N,WAAW;AAAA,MACb;AAIF,QAAM,kBAAkBA,OAAK,YAAY,oBAAoB,oBAAoB;AACjF,IAAI,MAAM,OAAO,eAAe,MAC9B,OAAO,KAAK,sCAAsC,EAAE,MAAM,WAAW,CAAC,GACtE,kBAAkB,EAAE,MAAM,YAAY,cAAc,gBAAgB;AAItE,QAAM,aAAaA,OAAK,YAAY,oBAAoB,eAAe;AACvE,QAAI,MAAM,OAAO,UAAU;AACzB,oBAAO,KAAK,iCAAiC,EAAE,MAAM,WAAW,CAAC,GAC1D;AAAA,QACL,MAAM;AAAA,QACN,WAAW;AAAA,MACb;AAGF,kBAAc,YACd,aAAaD,SAAQ,UAAU;AAAA,EACjC;AAMA,MAAI,mBAAmB,gBAAgB,CAACF,YAAW,YAAY;AAC7D,QAAI;AACF,UAAM,MAAM,MAAM,aAAa,gBAAgB,YAAY,GACrD,SAAS,KAAK,MAAM,GAAG,GACvB,UAAU,MAAM,QAAQ,OAAO,OAAO,IAAI,OAAO,UAAU,CAAC,GAE5D,eAAe,CAAC,UAA0B,MAAM,QAAQ,OAAO,GAAG,EAAE,QAAQ,UAAU,EAAE,EAAE,QAAQ,QAAQ,EAAE,GAC5G,qBAAqB,aAAa,YAAY,GAEhD,YAAyD;AAC7D,eAAW,KAAK,SAAS;AACvB,YAAM,SAAU,GAAW;AAC3B,YAAI,CAAC,OAAQ;AAGb,YAAI;AACJ,YAAI,OAAO,UAAW;AACpB,mBAAS;AAAA;AAOT,cAAI;AACF,gBAAM,aAAa,sBAAsB,QAAQ,GAAG,QAAQ,SAAS;AACrE,YAAI,WAAW,SAAS,oBACtB,SAAS,WAAW;AAAA,UAIxB,QAAQ;AAAA,UAER;AAGF,YAAI,CAAC,OAAQ;AACb,YAAM,MAAM,aAAa,MAAM;AAC/B,QAAK,QACD,uBAAuB,OAAO,mBAAmB,WAAW,GAAG,GAAG,GAAG,OACnE,CAAC,aAAa,IAAI,SAAS,UAAU,IAAI,YAC3C,YAAY,EAAE,KAAK,YAAY,GAAG,KAAK;AAAA,MAG7C;AAEA,UAAI,WAAW;AACb,YAAM,eAAeC,SAAQ,kBAAkB,UAAU,GAAG;AAC5D,sBAAO,KAAK,+CAA+C,EAAE,MAAM,cAAc,QAAQ,UAAU,YAAY,KAAK,UAAU,IAAI,CAAC,GAC5H;AAAA,UACL,MAAM;AAAA,UACN,WAAW;AAAA,QACb;AAAA,MACF;AAAA,IACF,QAAgB;AAAA,IAEhB;AAIF,MAAM,gBAAgB,MAAM,uBAAuB,cAAc,UAAU,eAAe;AAC1F,MAAI,cAAc,cAAc;AAC9B,WAAO;AAQT,MAAI,gBAAgB,CAACD,YAAW,YAAY,GAAG;AAC7C,QAAM,aAAaC,SAAQ,kBAAkB,YAAY;AACzD,QAAI,MAAM,OAAO,UAAU,GAAG;AAC5B,UAAI;AACJ,UAAI;AAEF,sBADU,MAAMF,MAAK,UAAU,GAChB,YAAY,IAAI,aAAaG,SAAQ,UAAU;AAAA,MAChE,QAAQ;AACN,qBAAa;AAAA,MACf;AACA,oBAAO,KAAK,qDAAqD;AAAA,QAC/D;AAAA,QACA,MAAM;AAAA,MACR,CAAC,GACM;AAAA,QACL,MAAM;AAAA,QACN,WAAW;AAAA,MACb;AAAA,IACF;AAAA,EACF;AAGA,SAAI,kBACK;AAAA,IACL,MAAM,gBAAgB;AAAA,IACtB,WAAW;AAAA,IACX,cAAc,gBAAgB;AAAA,EAChC,IAGK;AACT;AAUA,eAAe,uBACb,cACA,UACA,iBAC8B;AAE9B,MAAM,WAAW,uBAAuB,eAAe,GAGjD,SAAS,iBAAiB,cAAc,QAAQ;AAEtD,MAAI,CAAC;AACH,kBAAO,KAAK,mCAAmC,EAAE,aAAa,CAAC,GACxD;AAAA,MACL,MAAM;AAAA,MACN,WAAW;AAAA,IACb;AAIF,MAAM,WAAW,OAAO,M
AAM,WAC1BD,SAAQ,UAAU,OAAO,MAAM,QAAQ,IACvC;AAEJ,SAAI,OAAO,eAAe,OAAO,oBAC/B,OAAO,KAAK,2BAA2B;AAAA,IACrC;AAAA,IACA,YAAY,OAAO,iBAAiB;AAAA,IACpC,UAAU,OAAO,iBAAiB,IAAI,OAAK,EAAE,OAAO;AAAA,EACtD,CAAC,GAEM;AAAA,IACL,MAAM;AAAA,IACN,WAAW;AAAA,IACX,gBAAgB,OAAO,MAAM;AAAA,IAC7B,kBAAkB,OAAO,iBAAiB,IAAI,QAAM;AAAA,MAClD,SAAS,EAAE;AAAA,MACX,MAAM,EAAE,WAAWA,SAAQ,UAAU,EAAE,QAAQ,IAAI;AAAA,MACnD,YAAY,EAAE;AAAA,IAChB,EAAE;AAAA,EACJ,MAGF,OAAO,KAAK,sCAAsC;AAAA,IAChD;AAAA,IACA,MAAM;AAAA,IACN,SAAS,OAAO,MAAM;AAAA,IACtB,YAAY,OAAO,MAAM;AAAA,EAC3B,CAAC,GAEM;AAAA,IACL,MAAM;AAAA,IACN,gBAAgB,OAAO,MAAM;AAAA,IAC7B,WAAW;AAAA,EACb;AACF;AAUA,eAAsB,sBACpB,cACA,iBAC8B;AAI9B,MAAI,cAAc;AAIlB,GAHU,MAAMF,MAAK,YAAY,GAG1B,YAAY,MACjB,cAAcG,SAAQ,YAAY;AAIpC,MAAM,iBAAiB,MAAM,kBAAkB,WAAW;AAC1D,SAAI,kBAQG,MAAM,iCAAiC,cAAc,eAAe;AAC7E;AAKA,eAAe,kBAAkB,SAAsD;AAErF,MAAM,iBAAiBC,OAAK,SAAS,cAAc,eAAe;AAClE,MAAI,MAAM,OAAO,cAAc;AAC7B,WAAO;AAAA,MACL,MAAM;AAAA,MACN,WAAW;AAAA,IACb;AAIF,MAAM,cAAcA,OAAK,SAAS,oBAAoB,oBAAoB;AAC1E,MAAI,MAAM,OAAO,WAAW;AAC1B,WAAO;AAAA,MACL,MAAM;AAAA,MACN,WAAW;AAAA,MACX,cAAc;AAAA,IAChB;AAIF,MAAM,SAASA,OAAK,SAAS,oBAAoB,eAAe;AAChE,SAAI,MAAM,OAAO,MAAM,IACd;AAAA,IACL,MAAM;AAAA,IACN,WAAW;AAAA,EACb,IAGK;AACT;AAMA,eAAe,iCACb,cACA,iBAC8B;AAC9B,MAAM,WAAW,uBAAuB,eAAe,GAEnD,cAAc;AAIlB,GAHU,MAAMJ,MAAK,YAAY,GAG1B,YAAY,MACjB,cAAcG,SAAQ,YAAY;AAIpC,MAAI,eAAe;AACnB,SAAO,gBAAgB,gBAAc;AAEnC,QAAM,eAAeE,UAAS,aAAa,YAAY;AAEvD,QAAI,CAAC,gBAAgB,iBAAiB,KAAK;AAEzC,qBAAe,aACf,cAAcF,SAAQ,WAAW;AACjC;AAAA,IACF;AAGA,QAAM,SAAS,iBAAiB,cAAc,QAAQ;AAEtD,QAAI,QAAQ;AAEV,UAAM,WAAW,OAAO,MAAM,WAC1BD,SAAQ,aAAa,OAAO,MAAM,QAAQ,IAC1C;AAEJ,aAAI,OAAO,eAAe,OAAO,mBACxB;AAAA,QACL,MAAM;AAAA,QACN,WAAW;AAAA,QACX,gBAAgB,OAAO,MAAM;AAAA,QAC7B,kBAAkB,OAAO,iBAAiB,IAAI,QAAM;AAAA,UAClD,SAAS,EAAE;AAAA,UACX,MAAM,EAAE,WAAWA,SAAQ,aAAa,EAAE,QAAQ,IAAI;AAAA,UACtD,YAAY,EAAE;AAAA,QAChB,EAAE;AAAA,MACJ,IAGK;AAAA,QACL,MAAM;AAAA,QACN,gBAAgB,OAAO,MAAM;AAAA,QAC7B,WAAW;AAAA,MACb;AAAA,IACF;AAGA,mBAAe,aACf,cAAcC,SAAQ,WAAW;AAAA,EACnC;AAGA,SAAO;AAAA,IACL,MAAM;AAAA,IACN,WAAW;AAAA,EAC
b;AACF;;;AG9aO,SAAS,0BACd,cACA,iBACQ;AAER,MAAM,WAAW,uBAAuB,eAAe,GACjD,iBAAiB,MAAM,KAAK,IAAI,IAAI,QAAQ,CAAC,GAG7C,eAAe,aAAa,MAAM,GAAG,EAAE,OAAO,OAAK,EAAE,SAAS,CAAC,GAC/D,cAAwB,CAAC;AAG/B,EAAI,aAAa,KAAK,OAAK,MAAM,YAAY,EAAE,SAAS,OAAO,CAAC,KAC9D,YAAY,KAAK,8FAAyF,GAGxG,aAAa,KAAK,OAAK,MAAM,YAAY,EAAE,SAAS,OAAO,CAAC,KAC9D,YAAY,KAAK,0FAAqF,GAGpG,aAAa,KAAK,OAAK,MAAM,WAAW,EAAE,SAAS,MAAM,CAAC,KAC5D,YAAY,KAAK,2FAAsF,GAGrG,aAAa,KAAK,OAAK,MAAM,cAAc,EAAE,SAAS,SAAS,CAAC,KAClE,YAAY,KAAK,iGAA4F;AAI/G,MAAI,UAAU,SAAS,YAAY;AAAA;AAAA;AACnC,aAAW;AAAA;AAGX,MAAM,iBAAiB;AAAA,IACrB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAEA,WAAW,WAAW;AACpB,IAAI,eAAe,SAAS,OAAO,MACjC,WAAW,YAAO,OAAO;AAAA;AAK7B,MAAM,gBAAgB,eAAe,OAAO,OAAK,CAAC,eAAe,SAAS,CAAC,CAAC;AAC5E,MAAI,cAAc,SAAS,KAAK,cAAc,UAAU;AACtD,aAAW,WAAW;AACpB,iBAAW,YAAO,OAAO;AAAA;AAAA,MAEtB,CAAI,cAAc,SAAS,MAChC,WAAW,oBAAe,cAAc,MAAM;AAAA;AAGhD,SAAI,YAAY,SAAS,MACvB,WAAW;AAAA;AAAA,GACX,WAAW,YAAY,KAAK;AAAA,CAAI,IAG3B;AACT;;;AJhEO,IAAM,mBAAN,MAAsD;AAAA,EAC3D,UAAU,QAAgC;AACxC,WAAO,OAAO,SAAS;AAAA,EACzB;AAAA,EAEA,MAAM,KACJ,QACA,SACA,aACwB;AACxB,QAAI,CAAC,OAAO;AACV,YAAM,IAAI,gBAAgB,QAAQ,yCAAyC;AAG7E,QAAI;AAEF,UAAM,eAAeG,SAAQ,YAAY,WAAW,OAAO,SAAS,GAGhE,mBAAwB;AAC5B,UAAI,OAAO,cAAc;AAGvB,YAAM,eAAeA,SAAQ,cAAc,OAAO,YAAY;AAC9D,2BAAmB;AAAA,UACjB,WAAW;AAAA,UACX,MAAM;AAAA,UACN,cAAc,OAAO;AAAA,UACrB,gBAAgB;AAAA,QAClB,GAEA,OAAO,eAAe,cAEtB,OAAO,KAAK,4CAA4C;AAAA,UACtD,MAAM,OAAO;AAAA,UACb;AAAA,QACF,CAAC;AAAA,MACH,WAAW,OAAO,cAAc;AAG9B,YAAM,kBADiB,kBAAkB,YAAY,SAAS,EACvB,QAIjC,gCAAgCA,SAAQ,cAAc,OAAO,YAAY;AAa/E,YAZA,mBAAmB,MAAM;AAAA,UACtB,MAAM,OAAO,6BAA6B,IAAK,gCAAgC;AAAA,UAChF;AAAA,QACF,GAEA,OAAO,KAAK,yCAAyC;AAAA,UACnD,WAAW,iBAAiB;AAAA,UAC5B,MAAM,iBAAiB;AAAA,UACvB,gBAAgB,iBAAiB;AAAA,QACnC,CAAC,GAGG,iBAAiB,cAAc,QAAQ;AACzC,cAAM,eAAe,OAAO,gBAAgB,OAAO,aAAa,IAC1D,eAAe,0BAA0B,cAAc,eAAe;AAC5E,gBAAM,IAAI,gBAAgB,QAAQ,YAAY;AAAA,QAChD;AAGA,QAAI,iBAAiB,SACnB,OAAO,eAAe,iBAAiB;AAAA,MAE3C;AAGA,UAAM,cAAc,kBAAkB,QAAQ,cAGxC,kBAAkB,MAAM,iBAAiB,WAAW;AAG1D,UAAI,kBAAkB,cAAc;AAClC,eAAO;AAAA,UACL,UAAU;A
AAA,UACV,aAAa;AAAA,UACb,SAAS;AAAA,UACT;AAAA,UACA,QAAQ;AAAA,UACR,gBAAgB;AAAA,YACd,UAAU;AAAA,YACV,YAAY;AAAA,YACZ,cAAc,gBAAgB,gBAAgB,kBAAkB;AAAA,UAClE;AAAA,UACA,gBAAgB;AAAA;AAAA;AAAA;AAAA,YAId,GAAI,OAAO,eAAe,EAAE,UAAU,aAAa,IAAI,CAAC;AAAA,YACxD,eAAe;AAAA,UACjB;AAAA,QACF;AAKF,UAAM,cAAmB;AAAA,QACvB,UAAU;AAAA,QACV,kBAAkB,OAAO,gBAAgB;AAAA,QACzC,cAAc,OAAO;AAAA,MACvB;AAEA,MAAI,OAAO,sBACT,YAAY,SAAS,OAAO,kBAAkB,QAC9C,YAAY,OAAO,OAAO,kBAAkB;AAI9C,UAAI,gBAAgB,MAAM,oBAAoB,aAAa,WAAW,GAEhE,cAAc,cAAc,SAAS,MACrC,UAAU,cAAc,SAAS,WAAW;AAGlD,aAAO;AAAA,QACL,UAAU,cAAc;AAAA,QACxB;AAAA,QACA;AAAA,QACA;AAAA,QACA,QAAQ;AAAA,QACR,gBAAgB,gBAAgB,WAAW;AAAA,UACzC,UAAU;AAAA,UACV,YAAY,gBAAgB;AAAA;AAAA,QAC9B,IAAI;AAAA,QACJ,gBAAgB;AAAA;AAAA;AAAA;AAAA,UAId,GAAI,OAAO,eAAe,EAAE,UAAU,aAAa,IAAI,CAAC;AAAA,UACxD,eAAe;AAAA,QACjB;AAAA,MACF;AAAA,IACF,SAAS,OAAO;AACd,YAAM,IAAI;AAAA,QACR;AAAA,QACA,qCAAqC,OAAO,SAAS;AAAA,QACrD;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEF;;;AKjKA,SAAS,gBAAgB;AACzB,SAAS,QAAAC,cAAY;AACrB,SAAS,iBAAiB;AAC1B,SAAS,IAAI,cAAc;AAa3B,SAAS,iBAAiB,QAAqC;AAC7D,SAAI,SACK,OAAO,QAAQ,IAGjB;AAAA,IACL,MAAM,SAAiB;AAAE,aAAO,MAAM,OAAO;AAAA,IAAG;AAAA,IAChD,KAAK,eAAwB;AAAA,IAAc;AAAA,IAC3C,QAAQ,MAAc;AAAE,aAAO,MAAM,IAAI;AAAA,IAAG;AAAA,EAC9C;AACF;AAcA,IAAM,gBAAgB,UAAU,QAAQ,GAClC,eAAe,mBAAmB;AAiBxC,SAAS,MAAM,KAAsB;AACnC,SAAO,oBAAoB,KAAK,GAAG;AACrC;AAEA,SAAS,UAAU,KAAsB;AACvC,SAAO,kBAAkB,KAAK,GAAG;AACnC;AAEA,eAAe,OAAO,MAAgB,KAA+B;AACnE,MAAI;AAEF,YADe,MAAM,cAAc,OAAO,MAAM,EAAE,IAAI,CAAC,GACzC,OAAO,KAAK;AAAA,EAC5B,SAAS,OAAY;AACnB,QAAM,UAAU,OAAO,QAAQ,WAAW,EAAE,OAAO,KAAK,OAAO,WAAW,OAAO,KAAK;AACtF,UAAM,IAAI,gBAAgB,uBAAuB,OAAO,EAAE;AAAA,EAC5D;AACF;AAKA,eAAe,oBAAoB,UAAmC;AAEpE,UADgB,MAAM,OAAO,CAAC,aAAa,MAAM,GAAG,QAAQ,GAC7C,UAAU,GAAG,CAAC;AAC/B;AAQA,eAAe,uBAAuB,KAAa,KAAsC;AACvF,MAAI;AAEF,QAAM,SAAS,MAAM,OAAO,CAAC,aAAa,KADxB,OAAO,MAC+B,CAAC;AACzD,QAAI,CAAC;AACH,aAAO;AAET,QAAM,QAAQ,OAAO,MAAM,oBAAoB;AAC/C,WAAO,QAAQ,MAAM,CAAC,EAAE,UAAU,GAAG,CAAC,IAAI;AAAA,EAC5C,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAEA,IAAM,qBAAqB;AAM3B,SAAS,eAAe,KAAsB;AAC5C,SAAO,U
AAU,GAAG,KAAK,mBAAmB,KAAK,GAAG;AACtD;AAWA,eAAsB,iBAAiB,SAAmD;AACxF,MAAM,EAAE,KAAK,KAAK,QAAQ,WAAW,QAAQ,SAAS,gBAAgB,IAAI,SAGpE,gBAAgB,MAAM;AAC1B,QAAM,QAAQ,IAAI,MAAM,4BAA4B;AACpD,WAAO,QAAQ,MAAM,CAAC,IAAI;AAAA,EAC5B,GAGM,WAAW,OAAO,UAAkB,WAAmD;AAC3F,QAAI,MAAM,eAAe,KAAK,QAAQ,GAAG;AACvC,UAAM,YAAY,qBAAqB,KAAK,QAAQ;AACpD,YAAM,gBAAgB,SAAS;AAC/B,UAAM,YAAY,SAASC,OAAK,WAAW,MAAM,IAAI;AACrD,UAAI,CAAC,UAAU,MAAM,OAAO,SAAS,GAAG;AACtC,eAAO,MAAM,wBAAwB,MAAM,KAAK,EAAE,KAAK,KAAK,QAAQ,SAAS,CAAC;AAE9E,YAAM,aAAa,MAAM,IAAI,GAAG,KAAK,IAC/B,gBAAgB,SAAS,IAAI,MAAM,KAAK;AAC9C,sBAAO,KAAK,gBAAgB,cAAc,CAAC,GAAG,UAAU,GAAG,aAAa,KAAK,QAAQ,GAAG,GACjF,EAAE,MAAM,WAAW,WAAW,UAAU,UAAU,UAAU;AAAA,MACrE;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAGA,MAAI,CAAC,WAAW;AAEd,QAAI,OAAO,UAAU,GAAG,GAAG;AACzB,UAAM,WAAW,IAAI,UAAU,GAAG,CAAC,GAC7B,SAAS,MAAM,SAAS,UAAU,UAAU;AAClD,UAAI,OAAQ,QAAO;AAAA,IACrB;AAGA,QAAI,OAAO,eAAe,GAAG,KAAK,CAAC,UAAU,GAAG,GAAG;AACjD,UAAM,eAAe,MAAM,aAAa,sBAAsB,KAAK,GAAG;AACtE,UAAI,cAAc;AAChB,YAAM,SAAS,MAAM,SAAS,cAAc,qBAAqB;AACjE,YAAI,OAAQ,QAAO;AAAA,MACrB;AAAA,IACF;AAKA,QAAM,aAAa,MAAM,IAAI,GAAG,KAAK,IAC/B,uBAAwB,kBAA6C,SAA3B,iBAAiB,MAAM;AAEvE,IAAI,kBACF,gBAAgB,QAAQ,YAAY,cAAc,CAAC,GAAG,UAAU,EAAE,IAElE,qBAAsB,MAAM,YAAY,cAAc,CAAC,GAAG,UAAU,EAAE;AAGxE,QAAM,cAAc,MAAM,uBAAuB,KAAK,GAAG;AAGzD,QAFA,sBAAsB,KAAK,GAEvB,aAAa;AAEf,MAAI,OACF,MAAM,aAAa,eAAe,KAAK,KAAK,WAAW;AAGzD,UAAM,SAAS,MAAM,SAAS,aAAa,WAAW;AACtD,UAAI,OAAQ,QAAO;AAGnB,aAAO,MAAM,iCAAiC,EAAE,KAAK,KAAK,YAAY,CAAC;AAAA,IACzE;AAAA,EACF;AAGA,MAAM,UAAU,mBAAmB,GAAG;AACtC,QAAM,UAAU,OAAO,GAGvB,MAAM,kBAAkB,SAAS;AAAA,IAC/B;AAAA,IACA,YAAY,IAAI,YAAY;AAAA,IAC5B,cAAa,oBAAI,KAAK,GAAE,YAAY;AAAA,EACtC,CAAC;AAKD,MAAM,gBAAgBA;AAAA,IACpB;AAAA,IACA,eAAe,QAAQ,GAAG,IAAI,KAAK,IAAI,CAAC,IAAI,KAAK,OAAO,EAAE,SAAS,EAAE,EAAE,MAAM,GAAG,EAAE,CAAC;AAAA,EACrF;AAEA,SAAO,MAAM,+BAA+B,EAAE,KAAK,KAAK,OAAO,CAAC;AAKhE,MAAM,kBAAkB,MAAM,IAAI,GAAG,KAAK,IACpC,eAAgB,kBAA6C,SAA3B,iBAAiB,MAAM,GACzD,UAAU,mBAAmB;AAEnC,EAAI,kBACF,QAAQ,QAAQ,WAAW,cAAc,CAAC,GAAG,eAAe,EAAE,IAE9D,QAAQ,MAAM,WAAW,cAAc,CAAC,GAAG,eAAe,EAAE
;AAG9D,MAAI;AAEF,IAAI,OAAO,MAAM,GAAG,KAElB,MAAM,OAAO,CAAC,SAAS,WAAW,KAAK,KAAK,aAAa,CAAC,GAC1D,QAAQ,QAAQ,mBAAmB,GAAG,EAAE,GACxC,MAAM,OAAO,CAAC,SAAS,WAAW,KAAK,UAAU,GAAG,GAAG,aAAa,GACpE,QAAQ,QAAQ,uBAAuB,GAAG,EAAE,GAC5C,MAAM,OAAO,CAAC,YAAY,GAAG,GAAG,aAAa,KACpC,MAET,MAAM,OAAO,CAAC,SAAS,WAAW,KAAK,YAAY,KAAK,KAAK,aAAa,CAAC,IAG3E,MAAM,OAAO,CAAC,SAAS,WAAW,KAAK,KAAK,aAAa,CAAC,GAI5D,QAAQ,QAAQ,sBAAsB;AACtC,QAAM,YAAY,MAAM,oBAAoB,aAAa,GACnD,YAAY,qBAAqB,KAAK,SAAS;AAGrD,QAAI,MAAM,eAAe,KAAK,SAAS,GAAG;AACxC,aAAO,MAAM,yCAAyC,EAAE,WAAW,UAAU,CAAC,GAE9E,cAAc,KAAK,GAGnB,MAAM,GAAG,eAAe,EAAE,WAAW,IAAM,OAAO,GAAK,CAAC,GAGxD,MAAM,gBAAgB,SAAS;AAG/B,UAAMC,aAAY,SAASD,OAAK,WAAW,MAAM,IAAI;AACrD,UAAI,UAAU,CAAE,MAAM,OAAOC,UAAS;AACpC,cAAM,IAAI;AAAA,UACR,iBAAiB,MAAM,yCAAyC,GAAG;AAAA,QACrE;AAGF,aAAO;AAAA,QACL,MAAMA;AAAA,QACN;AAAA,QACA,UAAU;AAAA,MACZ;AAAA,IACF;AAIA,QAAI;AACF,YAAM,OAAO,eAAe,SAAS;AAAA,IACvC,SAAS,OAAY;AACnB,UAAM,OAAO,OAAO;AACpB,UAAI,SAAS,YAAY,SAAS;AAEhC,cAAM,GAAG,eAAe,EAAE,WAAW,IAAM,OAAO,GAAK,CAAC;AAAA;AAExD,cAAM;AAAA,IAEV;AAEA,WAAO,MAAM,uCAAuC,EAAE,UAAU,CAAC,GAG7D,MAAM,eAAe,KAAK,SAAS,KACrC,MAAM,gBAAgB,SAAS,GAIjC,MAAM,oBAAoB,WAAW;AAAA,MACnC;AAAA,MACA,QAAQ;AAAA,MACR;AAAA,MACA;AAAA,MACA,WAAU,oBAAI,KAAK,GAAE,YAAY;AAAA,MACjC,eAAc,oBAAI,KAAK,GAAE,YAAY;AAAA,IACvC,CAAC,GAGG,OACF,MAAM,aAAa,eAAe,KAAK,KAAK,SAAS;AAIvD,QAAM,YAAY,SAASD,OAAK,WAAW,MAAM,IAAI;AACrD,QAAI,UAAU,CAAE,MAAM,OAAO,SAAS;AACpC,YAAM,IAAI;AAAA,QACR,iBAAiB,MAAM,yCAAyC,GAAG;AAAA,MACrE;AAIF,QAAM,eAAeA,OAAK,WAAW,cAAc,eAAe,GAC5D,cAAc,MAAM,OAAO,YAAY,GAEvC,qBAAqBA,OAAK,WAAW,aAAa,eAAe,cAAc,WAAW,GAC1F,oBAAoB,MAAM,OAAO,kBAAkB,GAEnD,0BAA0BA,OAAK,WAAW,aAAa,eAAe,cAAc,gBAAgB,GACpG,yBAAyB,MAAM,OAAO,uBAAuB;AAEnE,QAAI,CAAC,eAAe,CAAC,qBAAqB,CAAC;AACzC,YAAM,IAAI;AAAA,QACR,0EACY,cAAc,eAAe,KAAK,aAAa,aAAa,IAAI,cAAc,WAAW,QAAQ,aAAa,aAAa,IAAI,cAAc,gBAAgB,OACnK,SAAS,WAAW,MAAM,MAAM,iBAAiB;AAAA,MACzD;AAGF,QAAM,UAAU,MAAM,IAAI,GAAG,KAAK,IAC5B,aAAa,SAAS,iBAAiB,MAAM,KAAK;AAExD,WAAI,kBACF,QAAQ,QAAQ,UAAU,cAAc,CAAC,GAAG,OAAO,KAAK,SAAS,GAAG,IAEpE,aAAc,KAAK,UAAU,cAAc,CAAC,
GAAG,OAAO,KAAK,SAAS,GAAG,GAGzE,OAAO,KAAK,yBAAyB,GAAG,GAAG,OAAO,GAAG,UAAU,cAAc,SAAS,GAAG,GAElF;AAAA,MACL,MAAM;AAAA,MACN;AAAA,MACA,UAAU;AAAA,IACZ;AAAA,EAEF,SAAS,OAAO;AACd,wBAAc,KAAK,GAEf,MAAM,OAAO,aAAa,KAC5B,MAAM,GAAG,eAAe,EAAE,WAAW,IAAM,OAAO,GAAK,CAAC,GAEpD;AAAA,EACR;AACF;;;ACxVA,eAAsB,mBAAmB,SAA+D;AACtG,MAAM,cAAc,MAAM,iBAAiB;AAAA,IACzC,KAAK,QAAQ;AAAA,IACb,KAAK,QAAQ;AAAA,IACb,QAAQ,QAAQ;AAAA,IAChB,WAAW,QAAQ;AAAA,IACnB,SAAS,QAAQ;AAAA,EACnB,CAAC,GAEK,EAAE,MAAM,YAAY,UAAU,UAAU,IAAI;AAKlD,MAAI,QAAQ;AACV,WAAO;AAAA,MACL,KAAK;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,MACA,eAAe;AAAA,IACjB;AAKF,MAAM,kBAAkB,MAAM,iBAAiB,UAAU;AACzD,SAAI,gBAAgB,YAAY,gBAAgB,SAAS,gBAChD;AAAA,IACL,KAAK;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA,eAAe;AAAA,EACjB,IAYK;AAAA,IACL,KARU,MAAM,oBAAoB,YAAY;AAAA,MAChD,QAAQ,QAAQ;AAAA,MAChB,MAAM,QAAQ;AAAA,MACd,cAAc,QAAQ;AAAA,MACtB;AAAA,IACF,CAAC;AAAA,IAIC;AAAA,IACA;AAAA,IACA;AAAA,IACA,eAAe;AAAA,EACjB;AACF;;;ACjEA,SAAS,QAAAE,aAAY;AACrB,SAAS,WAAAC,UAAS,WAAAC,gBAAe;AAK1B,IAAM,kBAAN,MAAqD;AAAA,EAC1D,UAAU,QAAgC;AACxC,WAAO,OAAO,SAAS;AAAA,EACzB;AAAA,EAEA,MAAM,KACJ,QACA,SACA,aACA,SACwB;AACxB,QAAI,CAAC,OAAO;AACV,YAAM,IAAI,gBAAgB,QAAQ,qCAAqC;AAGzE,QAAI;AAGF,UAAM,YAAY,QAAQ,mBAAmB,kBACvC,SAAS,MAAM,mBAAmB;AAAA,QACtC,KAAK,OAAO;AAAA,QACZ,KAAK,OAAO;AAAA,QACZ,MAAM,OAAO;AAAA,QACb,cAAc,OAAO;AAAA,QACrB;AAAA,QACA;AAAA,MACF,CAAC,GAGG,mBAAwB;AAC5B,UAAI,OAAO,cAAc;AAEvB,YAAM,eAAeD,SAAQ,OAAO,UAAU,OAAO,YAAY;AACjE,2BAAmB;AAAA,UACjB,WAAW;AAAA,UACX,MAAM;AAAA,UACN,cAAc,OAAO;AAAA,UACrB,gBAAgB;AAAA,QAClB,GAEA,OAAO,eAAe,cAEtB,OAAO,KAAK,2CAA2C;AAAA,UACrD,MAAM,OAAO;AAAA,UACb;AAAA,QACF,CAAC;AAAA,MACH,WAAW,OAAO,gBAAgB,OAAO,SAAS;AAGhD,YAAM,kBADiB,kBAAkB,YAAY,SAAS,EACvB,QACjC,eAAe,OAAO,gBAAgB,OAAO,WAAW;AAE9D,2BAAmB,MAAM;AAAA,UACvB;AAAA,UACA,OAAO;AAAA,UACP;AAAA,QACF,GAEA,OAAO,KAAK,wCAAwC;AAAA,UAClD,WAAW,iBAAiB;AAAA,UAC5B,MAAM,iBAAiB;AAAA,UACvB,gBAAgB,iBAAiB;AAAA,QACnC,CAAC,GAGG,iBAAiB,SACnB,OAAO,eAAe,iBAAiB;AAAA,MAE3C;AAIA,UACE,CAAC,OAAO,iBACP,OAAO,iBAAiB,kBAAkB,cAAc,gBACzD;AACA,YAAME,mBAAkB,MAAM,iBAAiB,OAAO,UAAU;
AAEhE,eAAO;AAAA,UACL,UAAU;AAAA;AAAA,UACV,aAAa;AAAA;AAAA,UACb,SAAS;AAAA,UACT,aAAa,OAAO;AAAA,UACpB,QAAQ;AAAA,UACR,gBAAgB;AAAA,YACd,UAAU;AAAA,YACV,YAAY;AAAA,YACZ,cAAcA,iBAAgB,gBAAgB,kBAAkB;AAAA,UAClE;AAAA,UACA,gBAAgB;AAAA,YACd,UAAU,OAAO;AAAA,YACjB,WAAW,OAAO;AAAA,YAClB,eAAe;AAAA,UACjB;AAAA,QACF;AAAA,MACF;AAsBA,UAAI;AACJ,UAAI,OAAO,cAAc;AACvB,YAAM,eAAeF,SAAQ,OAAO,UAAU,OAAO,YAAY,GAC3D,eAAe,kBAAkB;AAEvC,YAAI,iBACF,aAAa,WAAW,YAAY;AAAA;AAAA,QAElC,iBAAiB,OAAO;AAAA,QACxB,aAAa,WAAW,eAAe,GAAG;AAI5C,wBAAc;AAAA,iBACL,gBAAgB,iBAAiB,OAAO;AAIjD,wBAAc;AAAA;AAId,cAAI;AAEF,2BADU,MAAMD,MAAK,YAAY,GACjB,YAAY,IAAI,eAAeE,SAAQ,YAAY;AAAA,UACrE,QAAQ;AAEN,0BAAcA,SAAQ,YAAY;AAAA,UACpC;AAGF,eAAO,KAAK,0CAA0C;AAAA,UACpD,cAAc,OAAO;AAAA,UACrB;AAAA,UACA;AAAA,QACF,CAAC;AAAA,MACH;AAEE,sBAAc,kBAAkB,QAAQ,OAAO;AAIjD,UAAI,gBAAgB,MAAM,oBAAoB,aAAa;AAAA,QACzD,QAAQ,OAAO;AAAA,QACf,MAAM,OAAO;AAAA,QACb,cAAc,OAAO;AAAA,QACrB,UAAU,OAAO;AAAA,QACjB,kBAAkB,OAAO,gBAAgB;AAAA,MAC3C,CAAC,GAGK,kBAAkB,MAAM,iBAAiB,WAAW,GAMpD,sBAAsB,OAAO,gBAAgB,gBAAgB,SAAS,eAEtE,cAAc,cAAc,SAAS,MACrC,UAAU,cAAc,SAAS,WAAW;AAGlD,aAAO;AAAA,QACL,UAAU,cAAc;AAAA,QACxB;AAAA,QACA;AAAA,QACA;AAAA,QACA,QAAQ;AAAA,QACR,gBAAiB,gBAAgB,YAAY,CAAC,sBAAuB;AAAA,UACnE,UAAU;AAAA,UACV,YAAY,gBAAgB;AAAA;AAAA,UAC5B,cAAc,gBAAgB;AAAA,QAChC,IAAI;AAAA,QACJ,gBAAgB;AAAA,UACd,UAAU,OAAO;AAAA,UACjB,WAAW,OAAO;AAAA,UAClB,eAAe;AAAA,QACjB;AAAA,MACF;AAAA,IACF,SAAS,OAAO;AACd,UAAI,iBAAiB;AACnB,cAAM;AAER,UAAM,MAAM,OACN,MAAM,OAAO,SAAS,IAAI,OAAO,MAAM,KAAK,IAC5C,SAAS,OAAO,UAAU,WAAW,OAAO,OAAO,MAAM,IACzD,WAAW,KAAK,UAAU,MAAM,IAAI,OAAO,KAAK;AACtD,YAAM,IAAI;AAAA,QACR;AAAA,QACA,oCAAoC,OAAO,MAAM,GAAG,GAAG,GAAG,MAAM,GAAG,QAAQ;AAAA,QAC3E;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEF;;;ACtOA,SAAS,YAAYE,WAAU;AAC/B,SAAS,WAAAC,UAAS,QAAAC,cAAY;AAC9B,YAAYC,WAAU;AAUtB,IAAM,iBAAiB;AAOhB,SAAS,sBAAsB,WAA2B;AAC/D,SAAOC,OAAK,uBAAuB,SAAS,GAAG,cAAc,qBAAqB;AACpF;AAEA,SAAS,qBAAqB,QAA4B;AACxD,SAAO,MAAM,KAAK,IAAI,IAAI,MAAM,CAAC,EAAE,KAAK;AAC1C;AAEA,SAAS,iBAAiB,OAAqD;AAC7E,MAAM,SAAgC,CAAC,GACjC,OAAO,OAAO,KAAK,KAAK,EAAE,KAAK;AACrC,WA
AW,OAAO,MAAM;AACtB,QAAM,SAAS,MAAM,GAAG,KAAK,CAAC;AAG9B,QADmB,OAAO,KAAK,OAAK,OAAO,KAAM,YAAY,MAAM,IAAI,GACvD;AAEd,UAAM,eAAe,OAAO,KAAK,CAAC,GAAG,MAAM;AACzC,YAAM,UAAU,OAAO,KAAM,WAAW,IAAI,EAAE,QACxC,UAAU,OAAO,KAAM,WAAW,IAAI,EAAE;AAC9C,eAAO,QAAQ,cAAc,OAAO;AAAA,MACtC,CAAC,GACK,OAAO,oBAAI,IAAY;AAC7B,aAAO,GAAG,IAAI,aAAa,OAAO,UAAQ;AACxC,YAAM,SAAS,OAAO,QAAS,WAAW,OAAO,KAAK;AACtD,eAAI,KAAK,IAAI,MAAM,IAAU,MAC7B,KAAK,IAAI,MAAM,GACR;AAAA,MACT,CAAC;AAAA,IACH;AAEE,aAAO,GAAG,IAAI,qBAAqB,MAAkB;AAAA,EAEzD;AACA,SAAO;AACT;AAEA,SAAS,8BAA8B,OAA0C;AAC/E,MAAI,CAAC,SAAS,OAAO,SAAU,SAAU,QAAO;AAEhD,MAAM,UAAW,MAA6B;AAC9C,MAAI,OAAO,WAAY,YAAY,QAAQ,KAAK,EAAE,WAAW;AAC3D,WAAO;AAGT,MAAM,MAA6B;AAAA,IACjC,MAAM;AAAA,IACN,OAAO,CAAC;AAAA,EACV,GAEM,aAAc,MAAgC;AACpD,EAAI,OAAO,cAAe,YAAY,WAAW,KAAK,EAAE,SAAS,MAC/D,IAAI,UAAU;AAGhB,MAAM,UAAW,MAAqC;AACtD,MAAI,MAAM,QAAQ,OAAO,GAAG;AAC1B,QAAM,OAAO,QAAQ,OAAO,CAAC,MAAmB,OAAO,KAAM,YAAY,EAAE,KAAK,EAAE,SAAS,CAAC;AAC5F,IAAI,KAAK,SAAS,MAChB,IAAI,eAAe,qBAAqB,IAAI;AAAA,EAEhD;AAEA,MAAM,eAAgB,MAAkC;AACxD,MAAI,MAAM,QAAQ,YAAY,GAAG;AAC/B,QAAM,YAAY,aAAa,OAAO,CAAC,MAAmB,OAAO,KAAM,YAAY,EAAE,KAAK,EAAE,SAAS,CAAC;AACtG,IAAI,UAAU,SAAS,MACrB,IAAI,YAAY,qBAAqB,SAAS;AAAA,EAElD;AAEA,MAAM,eAAgB,MAAkC;AACxD,EAAI,OAAO,gBAAiB,YAAY,aAAa,KAAK,EAAE,SAAS,MACnE,IAAI,YAAY,aAAa,KAAK;AAGpC,MAAM,WAAY,MAA8B;AAChD,MAAI,YAAY,OAAO,YAAa,UAAU;AAC5C,QAAM,QAAgE,CAAC;AACvE,aAAW,CAAC,GAAG,CAAC,KAAK,OAAO,QAAQ,QAAmC,GAAG;AACxE,UAAI,OAAO,KAAM,YAAY,CAAC,MAAM,QAAQ,CAAC,EAAG;AAChD,UAAM,gBAAgB,2BAA2B,CAAC,GAC5C,UAAkD,CAAC;AACzD,eAAW,QAAQ,GAAgB;AACjC,YAAI,OAAO,QAAS,UAAU;AAC5B,cAAM,UAAU,KAAK,KAAK;AAC1B,cAAI,CAAC,QAAS;AACd,kBAAQ,KAAK,2BAA2B,OAAO,CAAC;AAChD;AAAA,QACF;AACA,YAAI,QAAQ,OAAO,QAAS,UAAU;AACpC,cAAM,YAAa,KAAa;AAChC,cAAI,OAAO,aAAc,YAAY,UAAU,KAAK,EAAE,WAAW,EAAG;AACpE,cAAM,UAAqC;AAAA,YACzC,QAAQ,2BAA2B,SAAS;AAAA,UAC9C,GACM,WAAY,KAAa;AAC/B,WAAI,aAAa,UAAU,aAAa,aAAa,aAAa,aAAa,aAAa,iBAC1F,QAAQ,QAAQ;AAElB,cAAM,UAAW,KAAa;AAC9B,cAAI,MAAM,QAAQ,OAAO,GAAG;AAC1B,gBAAM,cAAc,QAAQ,OAAO,CAAC,MAAW,OAAO,KAAM,YAAY,EAAE,KAAK,EAAE,SAAS,CAAC;AA
C3F,YAAI,YAAY,SAAS,MACvB,QAAQ,OAAO;AAAA,UAEnB;AACA,cAAM,UAAW,KAAa;AAC9B,UAAI,OAAO,WAAY,YAAY,QAAQ,KAAK,EAAE,SAAS,MACzD,QAAQ,OAAO;AAEjB,cAAM,gBAAiB,KAAa;AACpC,UAAI,OAAO,iBAAkB,YAAY,cAAc,KAAK,EAAE,SAAS,MACrE,QAAQ,aAAa,gBAEvB,QAAQ,KAAK,OAAO;AAAA,QACtB;AAAA,MACF;AACA,MAAI,QAAQ,WAAW,MACvB,MAAM,aAAa,IAAI;AAAA,IACzB;AACA,QAAI,QAAQ,iBAAiB,KAAK;AAAA,EACpC;AAGA,MAAM,iBAAkB,MAAoC;AAC5D,MAAI,kBAAkB,OAAO,kBAAmB,UAAU;AACxD,QAAM,MAAO,eAAuB,KAC9B,YAAa,eAAuB,WACpC,aAAc,eAAuB;AAE3C,IAAI,OAAO,OAAQ,YAAY,IAAI,KAAK,EAAE,SAAS,KAC/C,OAAO,aAAc,YAAY,UAAU,KAAK,EAAE,SAAS,KAC3D,OAAO,cAAe,YAAY,WAAW,KAAK,EAAE,SAAS,MAC/D,IAAI,cAAc,EAAE,KAAK,WAAW,WAAW;AAAA,EAEnD;AAEA,MAAM,gBAAiB,MAAmC;AAC1D,GAAI,kBAAkB,aAAa,kBAAkB,YAAY,kBAAkB,cAAc,kBAAkB,WACjH,IAAI,aAAa;AAGnB,MAAM,YAAa,MAA+B;AAClD,EAAI,OAAO,aAAc,YAAY,UAAU,KAAK,EAAE,SAAS,MAC7D,IAAI,SAAS,UAAU,KAAK;AAG9B,MAAM,kBAAmB,MAAqC;AAC9D,UAAI,oBAAoB,UAAU,oBAAoB,cACpD,IAAI,eAAe,kBAGd;AACT;AAEA,SAAS,2BAA2B,MAAkC;AACpE,MAAI,CAAC,QAAQ,OAAO,QAAS,SAAU,QAAO;AAC9C,MAAM,kBAAmB,KAAgC;AACzD,MAAI,CAAC,mBAAmB,OAAO,mBAAoB;AACjD,WAAO,EAAE,UAAU,CAAC,EAAE;AAGxB,MAAM,WAAkD,CAAC;AACzD,WAAW,CAAC,SAAS,QAAQ,KAAK,OAAO,QAAQ,eAA0C,GAAG;AAC5F,QAAI,OAAO,WAAY,YAAY,QAAQ,KAAK,EAAE,WAAW,EAAG;AAChE,QAAM,YAAY,8BAA8B,QAAQ;AACxD,IAAI,cACF,SAAS,OAAO,IAAI;AAAA,EAExB;AAEA,SAAO,EAAE,SAAS;AACpB;AAEA,eAAsB,mBAAmB,WAAkD;AACzF,MAAM,YAAY,sBAAsB,SAAS;AAEjD,MAAI,CAAE,MAAM,OAAO,SAAS;AAC1B,WAAO;AAAA,MACL,MAAM;AAAA,MACN,OAAO,EAAE,UAAU,CAAC,EAAE;AAAA,IACxB;AAGF,MAAI;AACF,QAAM,UAAU,MAAM,aAAa,SAAS,GACtC,SAAc,WAAK,OAAO,GAC1B,YAAY,2BAA2B,MAAM;AACnD,WAAK,YAOE,EAAE,MAAM,WAAW,OAAO,0BAA0B,SAAS,EAAE,KANpE,OAAO,KAAK,uCAAuC,SAAS,oBAAoB,GACzE,EAAE,MAAM,WAAW,OAAO,EAAE,UAAU,CAAC,EAAE,EAAE;AAAA,EAMtD,SAAS,OAAO;AACd,kBAAO,KAAK,qCAAqC,SAAS,KAAK,KAAK,EAAE,GAC/D,EAAE,MAAM,WAAW,OAAO,EAAE,UAAU,CAAC,EAAE,EAAE;AAAA,EACpD;AACF;AAWA,SAAS,0BAA0B,OAAuC;AACxE,MAAM,mBAA0D,CAAC;AAEjE,WAAW,CAAC,SAAS,OAAO,KAAK,OAAO,QAAQ,MAAM,QAAQ,GAAG;AAC/D,QAAM,iBAAiB,QAAQ,KAAK,QAAQ,OAAO,GAAG,GAKhDC,eAAc,CAAC,QAAQ,SACvB,aAAa,eAAe,SAAS,0BAA0B,KAClD,eAAe,SAAS,
yBAAyB;AAEpE,QAAI,CAACA,gBAAe,CAAC,YAAY;AAE/B,uBAAiB,OAAO,IAAI;AAC5B;AAAA,IACF;AAKA,QAAM,gBADgB,eAAe,MAAM,uDAAuD,IAC5D,CAAC,KAAK,QAGxC,UACA,MACA,kBAAkB,IAClB;AAGJ,QAAI,QAAQ,WAAW,KAAK,GAAG;AAC7B,wBAAkB;AAClB,UAAM,UAAU,QAAQ,MAAM,oCAAoC;AAClE,MAAI,YACF,WAAW,QAAQ,CAAC,GACpB,OAAO,QAAQ,CAAC,GAChB,sBAAsB,QAAQ,CAAC,KAAK;AAAA,IAExC,WAES,QAAQ,WAAW,GAAG,GAAG;AAChC,UAAM,UAAU,QAAQ,MAAM,kCAAkC;AAChE,MAAI,YACF,WAAW,QAAQ,CAAC,GACpB,OAAO,QAAQ,CAAC,GAChB,sBAAsB,QAAQ,CAAC,KAAK;AAAA,IAExC,OAEK;AACH,UAAM,YAAY,QAAQ,MAAM,iCAAiC;AACjE,MAAI,cACF,WAAW,UAAU,CAAC,GACtB,OAAO,UAAU,CAAC,GAClB,sBAAsB,UAAU,CAAC,KAAK;AAAA,IAE1C;AAGA,QAAI,CAAC,YAAY,CAAC,MAAM;AACtB,uBAAiB,OAAO,IAAI;AAC5B;AAAA,IACF;AAKA,QAAM,cAAc,qBAEhB,gBAAoC;AACxC,QAAI,iBAAiB,aAAa;AAChC,UAAM,aAAa,cAAc,QAAQ,OAAO,GAAG,GAC7C,WAAW,YAAY,QAAQ,OAAO,GAAG;AAE/C,MAAI,aAAa,cAGN,WAAW,SAAS,IAAI,QAAQ,EAAE,IAD3C,gBAAgB,iBAIP,SAAS,WAAW,GAAG,UAAU,GAAG,GAE7C,gBAAgB;AAAA,IAKpB,MAAO,CAAI,CAAC,iBAAiB,cAE3B,gBAAgB,cACP,iBAAiB,CAAC,gBAE3B,gBAAgB;AAIlB,QAAM,cAAc,gBAChB,MAAM,QAAQ,IAAI,IAAI,IAAI,aAAa,KACvC,MAAM,QAAQ,IAAI,IAAI;AAG1B,qBAAiB,WAAW,IAAI;AAAA,EAClC;AAEA,SAAO,EAAE,UAAU,iBAAiB;AACtC;AAEA,eAAsB,oBAAoB,QAA6C;AACrF,MAAM,YAAY,OAAO,MAInB,WADW,0BAA0B,OAAO,KAAK,EAC7B,YAAY,CAAC,GAEjC,iBAAwD,CAAC;AAC/D,WAAW,WAAW,OAAO,KAAK,QAAQ,EAAE,KAAK,GAAG;AAClD,QAAM,MAAM,SAAS,OAAO,GACtB,YAAmC;AAAA,MACvC,MAAM,IAAI;AAAA,MACV,OAAO,iBAAiB,IAAI,SAAS,CAAC,CAAC;AAAA,IACzC;AACA,IAAI,IAAI,YACN,UAAU,UAAU,IAAI,UAEtB,IAAI,gBAAgB,IAAI,aAAa,SAAS,MAChD,UAAU,eAAe,qBAAqB,IAAI,YAAY,IAE5D,IAAI,aAAa,IAAI,UAAU,SAAS,MAC1C,UAAU,YAAY,qBAAqB,IAAI,SAAS,IAEtD,IAAI,cACN,UAAU,YAAY,IAAI,YAExB,IAAI,gBACN,UAAU,cAAc,IAAI,cAE1B,IAAI,eACN,UAAU,aAAa,IAAI,aAEzB,IAAI,WACN,UAAU,SAAS,IAAI,SAErB,IAAI,iBACN,UAAU,eAAe,IAAI,eAE/B,eAAe,OAAO,IAAI;AAAA,EAC5B;AAEA,QAAM,UAAUC,SAAQ,SAAS,CAAC;AAElC,MAAM,OAAY;AAAA,IAChB;AAAA,MACE,UAAU;AAAA,IACZ;AAAA,IACA;AAAA,MACE,WAAW;AAAA,MACX,UAAU;AAAA,IACZ;AAAA,EACF,GAEM,aAAa,GAAG,cAAc;AAAA;AAAA,EAAO,IAAI,IACzC,WAAW,GAAG,SAAS;AAC7B,MAAI;AACF,UAAMC,IAAG,UAAU,UAAU,YAAY,MAAM,GAC/C,MAAMA,IA
AG,OAAO,UAAU,SAAS;AAAA,EACrC,SAAS,OAAO;AACd,QAAI;AAAE,YAAMA,IAAG,OAAO,QAAQ;AAAA,IAAG,QAAQ;AAAA,IAA6B;AACtE,UAAM,IAAI,gBAAgB,oCAAoC,SAAS,IAAI,EAAE,MAAM,WAAW,MAAM,CAAC;AAAA,EACvG;AACF;;;ACrYO,IAAM,wBAAN,MAA2D;AAAA,EAChE,UAAU,QAAgC;AACxC,WAAO,OAAO,SAAS;AAAA,EACzB;AAAA,EAEA,MAAM,KACJ,QACA,SACA,aACwB;AACxB,QAAI,CAAC,OAAO;AACV,YAAM,IAAI,gBAAgB,QAAQ,gDAAgD;AAGpF,QAAI;AAEF,UAAI,OAAO,aAAa;AACtB,YAAMC,OAAM,MAAM,oBAAoB,OAAO,aAAa;AAAA,UACxD,aAAa,OAAO;AAAA,QACtB,CAAC,GACKC,YAAWD,KAAI,UACfE,WAAU,OAAO,WAAWD,UAAS,WAAW;AAEtD,eAAO;AAAA,UACL,UAAAA;AAAA,UACA,aAAa,OAAO;AAAA,UACpB,SAAAC;AAAA,UACA,aAAa,OAAO;AAAA,UACpB,QAAQ;AAAA,UACR,gBAAiBF,KAAY,UAAU;AAAA,YACrC,UAAU;AAAA,YACV,YAAY;AAAA,YACZ,QAASA,KAAY;AAAA,UACvB,IAAI;AAAA,QACN;AAAA,MACF;AAIA,UAAM,EAAE,MAAM,IAAI,MAAM,mBAAmB,YAAY,SAAS,GAC1D,QAAQ,MAAM,WAAW,OAAO,WAAW;AAEjD,UAAI,CAAC,OAAO;AACV,cAAM,IAAI;AAAA,UACR;AAAA,UACA,YAAY,OAAO,WAAW,2DACT,OAAO,WAAW;AAAA,QACzC;AAKF,UAAM,cADW,oBAAoB,MAAM,MAAM,YAAY,SAAS,EACzC,UAGvB,MAAM,MAAM,oBAAoB,aAAa;AAAA,QACjD,aAAa,OAAO;AAAA,QACpB,QAAQ,OAAO;AAAA,QACf,MAAM,OAAO;AAAA,MACf,CAAC,GACK,WAAW,IAAI,UAEf,UAAU,MAAM,WAAW,SAAS,WAAW;AAErD,aAAO;AAAA,QACL;AAAA,QACA,aAAa,OAAO;AAAA,QACpB;AAAA,QACA;AAAA,QACA,QAAQ;AAAA,QACR,gBAAiB,IAAY,UAAU;AAAA,UACrC,UAAU;AAAA,UACV,YAAY;AAAA,UACZ,QAAS,IAAY;AAAA,QACvB,IAAI;AAAA,MACN;AAAA,IACF,SAAS,OAAO;AACd,YAAI,iBAAiB,kBACb,QAGF,IAAI;AAAA,QACR;AAAA,QACA,2BAA2B,OAAO,WAAW;AAAA,QAC7C;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEF;;;AC3FA,IAAM,UAAiC;AAAA,EACrC,IAAI,qBAAqB;AAAA,EACzB,IAAI,iBAAiB;AAAA,EACrB,IAAI,gBAAgB;AAAA,EACpB,IAAI,sBAAsB;AAC5B;AAKO,SAAS,mBAAmB,QAA4C;AAC7E,MAAM,SAAS,QAAQ,KAAK,OAAK,EAAE,UAAU,MAAM,CAAC;AAEpD,MAAI,CAAC;AACH,UAAM,IAAI,MAAM,wCAAwC,OAAO,IAAI,EAAE;AAGvE,SAAO;AACT;;;ACtBO,SAAS,qBAAqB,KAAmC;AACtE,SACE,IAAI,SAAS,WACb,IAAI,OAAO,SAAS,eACpB,IAAI,QAAQ,uBAAuB;AAEvC;AAKO,SAAS,WAAW,KAA0B,SAAuB;AAC1E,EAAK,IAAI,SAAS,SAAS,OAAO,MAChC,IAAI,SAAS,KAAK,OAAO,GACb,cAAc,IAAI,SAAS,EACnC,KAAK,OAAO;AAEpB;AAKO,SAAS,SAAS,KAA0B,SAAuB;AACxE,EAAK,IAAI,OAAO,SAAS,OAAO,KAC9B,IAAI,OAAO,KAAK,OAAO;AAE3B;AAKO,SAA
S,qBAAqB,KAAkC;AACrE,MAAM,EAAE,OAAO,IAAI;AAEnB,UAAQ,OAAO,MAAM;AAAA,IACnB,KAAK;AACH,aAAO,OAAO,UACV,GAAG,OAAO,WAAW,IAAI,OAAO,OAAO,KACvC,OAAO;AAAA,IAEb,KAAK;AAGH,UAAI,OAAO,gBAAgB,qBAAqB,OAAO,gBAAgB,kBAAkB;AACvF,YAAM,YAAY,OAAO,eAAe,kBAAkB;AAC1D,eAAO,OAAO,eAAe,aAAa;AAAA,MAC5C;AACA,aAAO,GAAG,OAAO,WAAW,UAAU,OAAO,SAAS;AAAA,IAExD,KAAK;AACH,UAAM,MAAM,OAAO,SAAS,IAAI,OAAO,MAAM,KAAK,IAC5C,SAAS,OAAO,UAAU,SAAS,OAAO,OAAO,KAAK;AAC5D,aAAO,GAAG,OAAO,WAAW,SAAS,OAAO,MAAM,GAAG,GAAG,GAAG,MAAM;AAAA,IAEnE,KAAK;AACH,aAAO,GAAG,OAAO,WAAW;AAAA,IAE9B;AACE,aAAO,OAAO;AAAA,EAClB;AACF;;;AChEA,SAAS,QAAAG,QAAM,YAAAC,iBAAgB;AAC/B,SAAS,QAAAC,aAAY;AA6BrB,eAAsB,uBACpB,UACA,SACA,cACA,SACuC;AACvC,MAAM,kBAAkBC,OAAK,UAAU,YAAY,GAC7C,SAASC,UAAS,SAAS,eAAe,EAAE,QAAQ,OAAO,GAAG;AAGpE,MAAI,OAAO,WAAW,IAAI;AACxB,WAAO;AAIT,MAAM,eAAe,OAAO,QAAQ,UAAU,EAAE,GAE5CC,eAAc;AAClB,MAAI;AAEF,IAAAA,gBADU,MAAMC,MAAK,eAAe,GACpB,YAAY;AAAA,EAC9B,QAAQ;AACN,QAAI,SAAS;AACX,YAAM,IAAI;AAAA,QACR,iEAAiE,YAAY;AAAA;AAAA;AAAA,MAE/E;AAAA,EAGJ;AAGA,MAAM,SAAS,aAAa,QAAQ,OAAO,EAAE,GACvC,UAAUD,eAAe,SAAS,GAAG,MAAM,QAAQ,OAAQ;AAEjE,SAAO,EAAE,SAAS,cAAc,aAAAA,cAAa,QAAQ;AACvD;AAwBO,SAAS,mBACd,KACA,QAC0B;AAC1B,MAAM,gBAAqB,OAAO,gBAAgB;AAClD,MAAI,CAAC;AACH,WAAO,CAAC;AAIV,MAAI,IAAI,OAAO;AACb,kBAAO,MAAM,gEAAgE,GACtE,CAAC;AAIV,MAAI,OAAO,gBAAgB,eAAe;AACxC,eAAI,OAAO,0BAA0B,IAC9B,EAAE,iBAAiB,cAAc;AAG1C,EAAI,eAAe,SACjB,IAAI,eAAe,cAAc,MAEjC,IAAI,OAAO,eAAe,cAAc,OAGtC,eAAe,kBAAkB,CAAC,IAAI,mBACxC,IAAI,iBAAiB,cAAc,iBAGjC,eAAe,cACjB,IAAI,aAAa,cAAc;AAIjC,MAAM,uBAAuB,IAAI,gBAAgB,OAAO;AACxD,EAAI,yBACF,IAAI,OAAO,cAAc;AAI3B,MAAM,WAAW,OAAO,gBAAgB;AAWxC,SAVI,CAAC,IAAI,gBAAgB,YAAY,IAAI,eACvC,IAAI,eAAeD,UAAS,UAAU,IAAI,YAAY,KAAK,MAClD,CAAC,IAAI,gBAAgB,OAAO,eAAe,IAAI,iBAExD,IAAI,eAAeA,UAAS,OAAO,aAAa,IAAI,YAAY,KAAK,MAIvE,IAAI,OAAO,0BAA0B,IAEjC,eAAe,cAAc,gBACxB,EAAE,iBAAiB,cAAc,IAGtC,eAAe,cAAc,eAAe,MAAM,QAAQ,cAAc,gBAAgB,IACnF,EAAE,iBAAiB,aAAa,kBAAkB,cAAc,iBAAiB,IAGnF,CAAC;AACV;AAeA,eAAsB,mBACpB,KACA,QACA,cACe;AAEf,MAAI,IAAI,uBAAuB;AAC7B,WAAO,MAAM,+DAA+D;AAC5E;AAAA,EACF;AAEA,MAAM,WAAW,
OAAO,gBAAgB,YAAY,OAAO;AAC3D,MAAI,CAAC;AACH;AAGF,MAAM,UAAU,IAAI,gBAAgB,OAAO;AAC3C,MAAI,CAAC;AACH;AAGF,MAAM,SAAS,MAAM,uBAAuB,UAAU,SAAS,cAAc,EAAE,QAAQ,GAAK,CAAC;AAC7F,MAAI,CAAC;AAEH,UAAM,IAAI;AAAA,MACR,4DAA4D,YAAY;AAAA;AAAA;AAAA,IAE1E;AAGF,MAAI,iBAAiB,OAAO,SAK5B,IAAI,eAAe,OAAO,YAAY,OAAO,SAAS,UAGtD,IAAI,wBAAwB;AAC9B;;;AChMA,eAAsB,iBAAiB,KAA0B,QAAqB,iBAAiD;AAGrI,MAAI,IAAI,OAAO,eAAe,IAAI,OAAO,eAAe,IAAI,iBAAiB,SAAS;AACpF;AAGF,MAAM,MAAM,UAAU,cAAc,IAAI,SAAS,GAG3C,UAAU,mBAAmB,IAAI,QAAQ,GACzC,UAAU,CAAC;AAEjB,MAAI;AAEF,QAAM,SAAS,mBAAmB,IAAI,MAAM,GAItC,aAAa,WADC,qBAAqB,GAAG,CACH;AACzC,IAAI,UACF,QAAQ,MAAM,UAAU,IAExB,QAAQ,QAAQ,UAAU;AAI5B,QAAM,SAAS,MAAM,OAAO,KAAK,IAAI,QAAQ,IAAI,SAAS,IAAI,WAAW,OAAO;AAKhF,IAAI,WACF,QAAQ,KAAK,GAIf,IAAI,OAAO,cAAc,OAAO,aAChC,IAAI,OAAO,UAAU,OAAO,SAIxB,IAAI,OAAO,oBAAoB,SAEjC,IAAI,OAAO,UAAU,IAAI,OAAO,kBACvB,OAAO,UAAU,WAAW,OAAO,SAAS,YAAY,OAAO,YAExE,IAAI,OAAO,UAAU,OAAO,SAAS,UAMvC,mBAAmB,KAAK,MAAM,GAIzB,IAAI,OAAO,gBACd,IAAI,OAAO,cAAc,OAAO;AAKlC,QAAM,eAAgB,IAAI,OAAe;AACzC,IAAI,gBACF,MAAM,mBAAmB,KAAK,QAAQ,YAAY,GAGpD,IAAI,OAAO,iBAAiB,OAAO,gBAG/B,OAAO,gBAAgB,cACpB,IAAI,OAAO,mBACd,IAAI,OAAO,iBAAiB,EAAE,UAAU,GAAM,IAE5C,CAAC,IAAI,OAAO,eAAe,qBAAqB,OAAO,eAAe,cAEvE,IAAI,OAAe,aAAa,OAAO,eAAe;AAK3D,QAAI;AACJ,YAAQ,IAAI,OAAO,MAAM;AAAA,MACvB,KAAK;AACH,yBAAiB;AACjB;AAAA,MACF,KAAK;AACH,yBAAiB;AACjB;AAAA,MACF,KAAK;AACH,yBAAiB;AACjB;AAAA,MACF,KAAK;AACH,yBAAiB;AACjB;AAAA,IACJ;AAIA,QAAM,mBAAmB,IAAI,OAAO,WAAW,OAAO,SAEhD,cAAmB;AAAA,MACvB,MAAM,OAAO;AAAA,MACb,SAAS;AAAA,MACT,KAAK;AAAA,QACH,UAAU,OAAO;AAAA,QACjB,OAAO,CAAC;AAAA,QACR,SAAU,OAAO,UAAkB,WAAW,IAAI,OAAO,gBAAgB;AAAA,MAC3E;AAAA,MACA,QAAQ;AAAA,MACR,QAAQ;AAAA,MACR,aAAa,IAAI,OAAO,eAAe,OAAO;AAAA;AAAA,IAChD;AAGA,IAAI,IAAI,OAAO,gBAAgB,sBAC7B,YAAY,sBAAsB,IAAI,OAAO,eAAe,oBAI1D,IAAI,OAAO,oBAAoB,WACjC,YAAY,kBAAkB,IAAI,OAAO,kBAG3C,IAAI,mBAAmB,CAAC,WAAW,GAEnC,OAAO,KAAK,UAAU,OAAO,WAAW,IAAI,gBAAgB,SAAS,OAAO,MAAM,EAAE;AAAA,EAEtF,SAAS,OAAO;AACd,IAAI,WACF,QAAQ,KAAK;AAEf,QAAM,WAAW,2BAA2B,KAAK;AACjD,mBAAS,KAAK,QAAQ,GAChB,IAAI,MAAM,QAAQ;AAAA,EAC1B;AA
CF;;;AChJA,SAAS,aAAAG,kBAAiB;;;ACA1B,SAAS,aAAAC,kBAAiB;AASnB,SAAS,eAAe,KAAUC,OAAmB;AAC1D,MAAI,CAACA;AACH,WAAO;AAGT,MAAM,OAAOA,MAAK,MAAM,GAAG,GACvB,UAAU;AAEd,WAAW,OAAO;AAChB,QAAI,WAAW,OAAO,WAAY,YAAY,OAAO;AACnD,gBAAU,QAAQ,GAAG;AAAA;AAErB;AAIJ,SAAO;AACT;AAUO,SAAS,eAAe,KAAUA,OAAc,OAAkB;AACvE,MAAI,CAACA;AACH;AAGF,MAAM,OAAOA,MAAK,MAAM,GAAG,GACvB,UAAU;AAGd,WAAS,IAAI,GAAG,IAAI,KAAK,SAAS,GAAG,KAAK;AACxC,QAAM,MAAM,KAAK,CAAC;AAElB,KAAI,EAAE,OAAO,YAAY,OAAO,QAAQ,GAAG,KAAM,YAAY,QAAQ,GAAG,MAAM,UAC5E,QAAQ,GAAG,IAAI,CAAC,IAGlB,UAAU,QAAQ,GAAG;AAAA,EACvB;AAGA,MAAM,WAAW,KAAK,KAAK,SAAS,CAAC;AACrC,UAAQ,QAAQ,IAAI;AACtB;AAQO,SAAS,kBAAkB,KAAUA,OAAoB;AAC9D,MAAI,CAACA;AACH;AAGF,MAAM,OAAOA,MAAK,MAAM,GAAG,GACvB,UAAU;AAGd,WAAS,IAAI,GAAG,IAAI,KAAK,SAAS,GAAG,KAAK;AACxC,QAAM,MAAM,KAAK,CAAC;AAElB,QAAI,EAAE,OAAO,YAAY,OAAO,QAAQ,GAAG,KAAM;AAC/C;AAGF,cAAU,QAAQ,GAAG;AAAA,EACvB;AAGA,MAAM,WAAW,KAAK,KAAK,SAAS,CAAC;AACrC,SAAO,QAAQ,QAAQ;AACzB;AAeO,SAAS,aAAa,OAAY,SAAmC;AAE1E,MAAI,YAAY;AACd,WAAO;AAIT,MAAI,OAAO,WAAY;AACrB,WAAI,OAAO,SAAU,WACZ,KAEFD,WAAU,OAAO,OAAO;AAIjC,MAAI,OAAO,WAAY,YAAY,YAAY,MAAM;AACnD,QAAI,OAAO,SAAU,YAAY,UAAU;AACzC,aAAO;AAIT,aAAW,CAAC,KAAK,YAAY,KAAK,OAAO,QAAQ,OAAO;AAEtD,UAAI,QAAQ;AAMV,YAAI,CAJmB,OAAO,OAAO,KAAK,EAAE,MAAM,OAC5C,iBAAiB,MAAY,KAC1B,MAAM,YACd,EACoB,QAAO;AAAA,iBAGxB,EAAE,OAAO,UAGT,iBAAiB,OAAO,MAAM,GAAG,MAAM;AACzC,eAAO;AAKb,WAAO;AAAA,EACT;AAGA,SAAO,UAAU;AACnB;AASO,SAAS,YAAY,KAAU,SAAS,IAAc;AAC3D,MAAM,OAAiB,CAAC;AAExB,MAAI,OAAO,OAAQ,YAAY,QAAQ;AACrC,WAAO,CAAC;AAGV,WAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,GAAG,GAAG;AAC9C,QAAM,UAAU,SAAS,GAAG,MAAM,IAAI,GAAG,KAAK;AAC9C,SAAK,KAAK,OAAO,GAGb,OAAO,SAAU,YAAY,UAAU,QAAQ,CAAC,MAAM,QAAQ,KAAK,KACrE,KAAK,KAAK,GAAG,YAAY,OAAO,OAAO,CAAC;AAAA,EAE5C;AAEA,SAAO;AACT;AAUO,SAAS,cAAc,SAAqD;AACjF,MAAM,gBAAgB,QAAQ,QAAQ,GAAG;AAEzC,MAAI,kBAAkB;AACpB,WAAO,EAAE,QAAQ,SAAS,QAAQ,GAAG;AAGvC,MAAM,SAAS,QAAQ,UAAU,GAAG,aAAa,GAC3C,SAAS,QAAQ,UAAU,gBAAgB,CAAC;AAElD,SAAO,EAAE,QAAQ,OAAO;AAC1B;AAQO,SAAS,gBAAgB,KAAU,QAAgB,QAA0B;AAGlF,SAFiB,YAAY,GAAG,EAEhB,OAAO,SACjB,UAAU,CAAC,IAAI,WAAW,MA
AM,KAGhC,UAAU,CAAC,IAAI,SAAS,MAAM,IACzB,KAGY,oBAAoB,KAAK,QAAQ,MAAM,EACxC,SAAS,CAC9B;AACH;AASO,SAAS,oBAAoB,KAAa,QAAgB,QAAwB;AACvF,MAAI,SAAS;AAEb,SAAI,WACF,SAAS,OAAO,UAAU,OAAO,MAAM,IAGrC,WACF,SAAS,OAAO,UAAU,GAAG,OAAO,SAAS,OAAO,MAAM,IAGrD;AACT;AAmBO,SAAS,qBACd,UACA,SACU;AACV,MAAM,WAAW,QAAQ,MAAM,GAAG,GAC5B,UAAoB,CAAC;AAE3B,WAAS,SAAS,KAAU,OAAe,aAAuB;AAChE,QAAI,SAAS,SAAS,QAAQ;AAE5B,cAAQ,KAAK,YAAY,KAAK,GAAG,CAAC;AAClC;AAAA,IACF;AAEA,QAAM,UAAU,SAAS,KAAK;AAE9B,QAAI,YAAY;AAEd,UAAI,OAAO,OAAQ,YAAY,QAAQ,QAAQ,CAAC,MAAM,QAAQ,GAAG;AAC/D,iBAAW,OAAO,OAAO,KAAK,GAAG;AAC/B,mBAAS,IAAI,GAAG,GAAG,QAAQ,GAAG,CAAC,GAAG,aAAa,GAAG,CAAC;AAAA;AAKvD,MAAI,OAAO,OAAQ,YAAY,QAAQ,QAAQ,WAAW,OACxD,SAAS,IAAI,OAAO,GAAG,QAAQ,GAAG,CAAC,GAAG,aAAa,OAAO,CAAC;AAAA,EAGjE;AAEA,kBAAS,UAAU,GAAG,CAAC,CAAC,GACjB;AACT;AAKO,SAAS,UAAa,KAAW;AACtC,MAAI,QAAQ,QAAQ,OAAO,OAAQ;AACjC,WAAO;AAGT,MAAI,MAAM,QAAQ,GAAG;AACnB,WAAO,IAAI,IAAI,UAAQ,UAAU,IAAI,CAAC;AAGxC,MAAM,SAAc,CAAC;AACrB,WAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,GAAG;AAC3C,WAAO,GAAG,IAAI,UAAU,KAAK;AAG/B,SAAO;AACT;;;ACvSO,SAAS,aAAa,OAAY,SAA0B;AAEjE,MAAI,OAAO,SAAU,UAAU;AAE7B,QAAI,MAAM,WAAW,MAAM;AACzB,aAAO,MAAM,UAAU,CAAC;AAI1B,QAAI,MAAM,WAAW,IAAI,GAAG;AAC1B,UAAM,UAAU,MAAM,UAAU,CAAC;AAGjC,aAAI,WAAW,UACN,QAAQ,OAAO,IAIjB;AAAA,IACT;AAGA,WAAO;AAAA,EACT;AAGA,MAAI,OAAO,SAAU,YAAY,UAAU,MAAM;AAC/C,QAAI,MAAM,QAAQ,KAAK;AACrB,aAAO,MAAM,IAAI,UAAQ,aAAa,MAAM,OAAO,CAAC;AAGtD,QAAM,SAAc,CAAC;AACrB,aAAW,CAAC,KAAK,GAAG,KAAK,OAAO,QAAQ,KAAK;AAC3C,aAAO,GAAG,IAAI,aAAa,KAAK,OAAO;AAEzC,WAAO;AAAA,EACT;AAGA,SAAO;AACT;;;AC/CO,SAAS,WACd,UACA,WACA,SACK;AACL,MAAM,SAAS,EAAE,GAAG,SAAS,GACvB,SAAS,UAAU;AAEzB,WAAW,CAAC,WAAW,KAAK,KAAK,OAAO,QAAQ,MAAM,GAAG;AAEvD,QAAM,gBAAgB,aAAa,OAAO,OAAO;AAGjD,mBAAe,QAAQ,WAAW,aAAa;AAAA,EACjD;AAEA,SAAO;AACT;AAKO,SAAS,YAAY,WAA+D;AACzF,MAAM,SAAmB,CAAC;AAE1B,MAAI,CAAC,UAAU,QAAQ,OAAO,UAAU,QAAS;AAC/C,kBAAO,KAAK,wBAAwB,GAC7B,EAAE,OAAO,IAAO,OAAO;AAGhC,EAAI,OAAO,KAAK,UAAU,IAAI,EAAE,WAAW,KACzC,OAAO,KAAK,mCAAmC;AAGjD,WAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,UAAU,IAAI;AACtD,KAAI,CAAC,OAAO,IAAI,KAAK,MAAM,
OACzB,OAAO,KAAK,iCAAiC;AAIjD,SAAO;AAAA,IACL,OAAO,OAAO,WAAW;AAAA,IACzB;AAAA,EACF;AACF;;;ACpCO,SAAS,cACd,UACA,WACK;AACL,MAAM,SAAS,EAAE,GAAG,SAAS,GACvB,WAAW,UAAU;AAE3B,WAAW,CAAC,SAAS,OAAO,KAAK,OAAO,QAAQ,QAAQ;AACtD,IAAI,QAAQ,SAAS,GAAG,IAEtB,mBAAmB,QAAQ,SAAS,OAAO,IAG3C,aAAa,QAAQ,SAAS,OAAO;AAIzC,SAAO;AACT;AAKA,SAAS,aAAa,KAAU,SAAiB,SAAuB;AACtE,MAAM,QAAQ,eAAe,KAAK,OAAO;AAEzC,EAAI,UAAU,WACZ,eAAe,KAAK,SAAS,KAAK,GAClC,kBAAkB,KAAK,OAAO;AAElC;AAMA,SAAS,mBAAmB,KAAU,YAAoB,YAA0B;AAElF,MAAM,EAAE,QAAQ,WAAW,QAAQ,UAAU,IAAI,cAAc,UAAU,GACnE,EAAE,QAAQ,WAAW,QAAQ,UAAU,IAAI,cAAc,UAAU,GAGnE,eAAe,gBAAgB,KAAK,WAAW,SAAS;AAG9D,WAAW,UAAU,cAAc;AAEjC,QAAM,eAAe,oBAAoB,QAAQ,WAAW,SAAS,GAG/D,SAAS,YAAY,eAAe,WAGpC,QAAQ,eAAe,KAAK,MAAM;AACxC,IAAI,UAAU,WACZ,eAAe,KAAK,QAAQ,KAAK,GACjC,kBAAkB,KAAK,MAAM;AAAA,EAEjC;AACF;AAKO,SAAS,eAAe,WAAkE;AAC/F,MAAM,SAAmB,CAAC;AAE1B,MAAI,CAAC,UAAU,WAAW,OAAO,UAAU,WAAY;AACrD,kBAAO,KAAK,2BAA2B,GAChC,EAAE,OAAO,IAAO,OAAO;AAGhC,EAAI,OAAO,KAAK,UAAU,OAAO,EAAE,WAAW,KAC5C,OAAO,KAAK,8CAA8C;AAG5D,WAAW,CAAC,SAAS,OAAO,KAAK,OAAO,QAAQ,UAAU,OAAO,GAAG;AAClE,KAAI,CAAC,WAAW,QAAQ,KAAK,MAAM,OACjC,OAAO,KAAK,qCAAqC,IAG/C,CAAC,WAAW,QAAQ,KAAK,MAAM,OACjC,OAAO,KAAK,qCAAqC;AAInD,QAAM,gBAAgB,QAAQ,MAAM,KAAK,KAAK,CAAC,GAAG,QAC5C,gBAAgB,QAAQ,MAAM,KAAK,KAAK,CAAC,GAAG;AAElD,IAAI,iBAAiB,gBACnB,OAAO;AAAA,MACL,+BAA+B,OAAO,SAAS,YAAY,sBACnD,OAAO,SAAS,YAAY;AAAA,IACtC,GAGE,eAAe,KACjB,OAAO,KAAK,gEAAgE,OAAO,GAAG;AAAA,EAE1F;AAEA,SAAO;AAAA,IACL,OAAO,OAAO,WAAW;AAAA,IACzB;AAAA,EACF;AACF;;;AC/GO,SAAS,aACd,UACA,WACK;AACL,MAAM,SAAS,EAAE,GAAG,SAAS,GACvB,SAAS,MAAM,QAAQ,UAAU,MAAM,IAAI,UAAU,SAAS,CAAC,UAAU,MAAM;AAErF,WAAW,aAAa;AAEtB,QAAI,UAAU,SAAS,GAAG,GAAG;AAE3B,UAAM,eAAe,qBAAqB,QAAQ,SAAS;AAG3D,eAAWE,SAAQ;AACjB,0BAAkB,QAAQA,KAAI;AAAA,IAElC;AAEE,wBAAkB,QAAQ,SAAS;AAIvC,SAAO;AACT;AAKO,SAAS,cAAc,WAAiE;AAC7F,MAAM,SAAmB,CAAC;AAE1B,MAAI,CAAC,UAAU;AACb,kBAAO,KAAK,6CAA6C,GAClD,EAAE,OAAO,IAAO,OAAO;AAGhC,MAAM,SAAS,MAAM,QAAQ,UAAU,MAAM,IAAI,UAAU,SAAS,CAAC,UAAU,MAAM;AAErF,EAAI,OAAO,WAAW,KACpB,OAAO,KAAK,qCAAqC;AAGnD,WAAW,SAAS;AAClB,IAAI,OAAO,SAAU
,WACnB,OAAO,KAAK,+BAA+B,KAClC,CAAC,SAAS,MAAM,KAAK,MAAM,OACpC,OAAO,KAAK,mCAAmC;AAInD,SAAO;AAAA,IACL,OAAO,OAAO,WAAW;AAAA,IACzB;AAAA,EACF;AACF;;;ACnDO,SAAS,cACd,UACA,WACK;AACL,MAAM,SAAS,EAAE,GAAG,SAAS,GACvB,EAAE,OAAO,OAAO,SAAS,aAAa,IAAI,UAAU,SAGpD,eAAe,eAAe,QAAQ,KAAK;AAIjD,MAAI,iBAAiB;AACnB,WAAO;AAIT,WAAW,EAAE,SAAS,MAAM,KAAK;AAC/B,QAAI,aAAa,cAAc,OAAO;AACpC,4BAAe,QAAQ,OAAO,KAAK,GAC5B;AAKX,SAAI,iBAAiB,UACnB,eAAe,QAAQ,OAAO,YAAY,GAGrC;AACT;AAKO,SAAS,eAAe,WAAkE;AAC/F,MAAM,SAAmB,CAAC;AAE1B,MAAI,CAAC,UAAU,WAAW,OAAO,UAAU,WAAY;AACrD,kBAAO,KAAK,2BAA2B,GAChC,EAAE,OAAO,IAAO,OAAO;AAGhC,MAAM,SAAS,UAAU;AAMzB,OAJI,CAAC,OAAO,SAAS,OAAO,OAAO,SAAU,aAC3C,OAAO,KAAK,0CAA0C,GAGpD,CAAC,OAAO,SAAS,CAAC,MAAM,QAAQ,OAAO,KAAK;AAC9C,kBAAO,KAAK,gCAAgC,GACrC,EAAE,OAAO,OAAO,WAAW,GAAG,OAAO;AAG9C,EAAI,OAAO,MAAM,WAAW,KAC1B,OAAO,KAAK,2CAA2C;AAGzD,WAAS,IAAI,GAAG,IAAI,OAAO,MAAM,QAAQ,KAAK;AAC5C,QAAM,aAAa,OAAO,MAAM,CAAC;AAEjC,QAAI,CAAC,cAAc,OAAO,cAAe,UAAU;AACjD,aAAO,KAAK,iBAAiB,CAAC,qBAAqB;AACnD;AAAA,IACF;AAEA,IAAM,aAAa,cACjB,OAAO,KAAK,iBAAiB,CAAC,+BAA+B,GAGzD,WAAW,cACf,OAAO,KAAK,iBAAiB,CAAC,6BAA6B;AAAA,EAE/D;AAEA,SAAO;AAAA,IACL,OAAO,OAAO,WAAW;AAAA,IACzB;AAAA,EACF;AACF;;;ACvFO,SAAS,gBACd,UACA,WACA,SACK;AACL,MAAM,SAAS,EAAE,GAAG,SAAS,GACvB,EAAE,OAAO,WAAW,IAAI,UAAU;AAKxC,MAFoB,MAAM,SAAS,GAAG,GAErB;AAEf,QAAM,eAAe,qBAAqB,QAAQ,KAAK;AAEvD,QAAI,aAAa,WAAW;AAC1B,aAAO;AAIT,aAAWC,SAAQ,cAAc;AAC/B,UAAI,QAAQ,eAAe,QAAQA,KAAI;AAEvC,eAAW,QAAQ;AACjB,gBAAQ,kBAAkB,OAAO,MAAM,OAAO;AAGhD,MAAI,UAAU,MAAO,MAAM,QAAQ,KAAK,KAAK,MAAM,WAAW,IAC5D,kBAAkB,QAAQA,KAAI,IAE9B,eAAe,QAAQA,OAAM,KAAK;AAAA,IAEtC;AAAA,EACF,OAAO;AAEL,QAAI,QAAQ,eAAe,QAAQ,KAAK;AAIxC,QAAI,UAAU;AACZ,aAAO;AAGT,aAAW,QAAQ;AACjB,cAAQ,kBAAkB,OAAO,MAAM,OAAO;AAGhD,IAAI,UAAU,MAAO,MAAM,QAAQ,KAAK,KAAK,MAAM,WAAW,IAC5D,kBAAkB,QAAQ,KAAK,IAE/B,eAAe,QAAQ,OAAO,KAAK;AAAA,EAEvC;AAEA,SAAO;AACT;AAKA,SAAS,kBAAkB,OAAY,MAAoB,SAA0B;AACnF,SAAI,aAAa,OACR,YAAY,OAAO,KAAK,OAAO,IAGpC,oBAAoB,OACf,mBAAmB,OAAO,KAAK,cAAc,IAGlD,oBAAoB,OACf,mBAAmB,OAAO,KAAK,gBAAgB,OAAO,IAG3D,UAAU,OACL,SAAS,OAAO,KAAK,IAAI,IAG9B,a
AAa,OACR,YAAY,OAAO,KAAK,OAAO,IAGpC,cAAc,OACT,aAAa,OAAO,KAAK,QAAQ,IAGtC,gBAAgB,OACX,eAAe,OAAO,KAAK,UAAU,IAG1C,cAAc,OACT,aAAa,OAAO,KAAK,QAAQ,IAGtC,gBAAgB,OACX,eAAe,OAAO,KAAK,YAAY,OAAO,IAGnD,kBAAkB,OACb,iBAAiB,OAAO,KAAK,YAAY,IAI3C;AACT;AAOA,SAAS,YAAY,OAAY,QAAqD;AACpF,MAAI,OAAO,SAAU,YAAY,UAAU;AACzC,WAAO;AAGT,MAAM,SAAc,CAAC,GACf,QAAQ,OAAO,SAAS,CAAC;AAE/B,WAAW,CAAC,KAAK,GAAG,KAAK,OAAO,QAAQ,KAAK,GAAG;AAC9C,QAAI,OAAO;AAGX,IAAI,WAAW,SAAS,QAAQ,MAAM,UACpC,OAAO,KAIL,SAAS,SAAS,QAAQ,MAAM,QAClC,OAAO,KAGL,SACF,OAAO,GAAG,IAAI;AAAA,EAElB;AAEA,SAAO;AACT;AAWA,SAAS,mBACP,OACA,QACK;AACL,MAAI,OAAO,SAAU,YAAY,UAAU;AACzC,WAAO,CAAC;AAIV,MAAI,WAAW;AACb,WAAO,OAAO,QAAQ,KAAK;AAK7B,UAFgB,OAAO,WAAW,WAEjB;AAAA,IACf,KAAK;AACH,aAAO,OAAO,KAAK,KAAK;AAAA,IAC1B,KAAK;AACH,aAAO,OAAO,OAAO,KAAK;AAAA,IAC5B,KAAK;AACH,aAAO,OAAO,QAAQ,KAAK;AAAA,IAC7B;AACE,aAAO,OAAO,QAAQ,KAAK;AAAA,EAC/B;AACF;AAaA,SAAS,mBAAmB,OAAY,QAAwB,SAA0B;AACxF,MAAI,CAAC,MAAM,QAAQ,KAAK;AACtB,WAAO;AAIT,MAAM,gBAAgB,aAAa,OAAO,OAAO,OAAO,GAElD,SAAc,CAAC;AACrB,WAAW,OAAO;AAChB,IAAI,OAAO,OAAQ,aACjB,OAAO,GAAG,IAAI;AAIlB,SAAO;AACT;AAYA,SAAS,SACP,OACA,QACK;AACL,SAAK,MAAM,QAAQ,KAAK,IAIjB,MAAM,IAAI,UAAQ;AACvB,QAAI,OAAO,QAAS;AAClB,aAAO;AAIT,QAAI,OAAO;AACT,aAAO,OAAO,QAAQ,IAAI,KAAK;AAIjC,QAAI,OAAO;AACT,cAAQ,OAAO,MAAM;AAAA,QACnB,KAAK;AACH,iBAAO,KAAK,OAAO,CAAC,EAAE,YAAY,IAAI,KAAK,MAAM,CAAC;AAAA,QACpD,KAAK;AACH,iBAAO,KAAK,YAAY;AAAA,QAC1B,KAAK;AACH,iBAAO,KAAK,YAAY;AAAA,QAC1B;AACE,iBAAO;AAAA,MACX;AAIF,WAAO;AAAA,EACT,CAAC,IA7BQ;AA8BX;AAYA,SAAS,YACP,OACA,QACK;AACL,MAAM,EAAE,MAAM,YAAY,GAAG,IAAI;AAEjC,UAAQ,MAAM;AAAA,IACZ,KAAK;AAEH,aAAK,MAAM,QAAQ,KAAK,IAGjB,MAAM,KAAK,SAAS,IAFlB;AAAA,IAIX,KAAK;AAEH,aAAI,OAAO,SAAU,WACZ,QAEF,MAAM,MAAM,SAAS,EAAE,IAAI,OAAK,EAAE,KAAK,CAAC,EAAE,OAAO,OAAK,EAAE,SAAS,CAAC;AAAA,IAE3E,KAAK;AAEH,aAAK,MAAM,QAAQ,KAAK,IAGjB,MAAM,OAAO,CAAC,KAAK,MAAM,OAAO,OAAO,CAAC,KAAK,IAAI,CAAC,IAFhD;AAAA,IAIX,KAAK;AAEH,aAAK,MAAM,QAAQ,KAAK,IAGjB,MAAM,SAFJ;AAAA,IAIX;AACE,aAAO;AAAA,EACX;AACF;AAgBA,SAAS,aACP,OACA,QACK;AACL,MAAI,OAAO,SAAU;AACnB,WAAO;AAGT,MAAM,QAAQ,OAAO,SAAS
,IACxB,QAAQ,IAAI,OAAO,OAAO,SAAS,KAAK;AAE9C,SAAO,MAAM,QAAQ,OAAO,OAAO,IAAI;AACzC;AASA,SAAS,eACP,OACA,QACK;AACL,MAAI,OAAO,SAAU,YAAY,UAAU;AACzC,WAAO;AAGT,MAAM,SAA8B,CAAC;AAErC,WAAW,CAAC,UAAU,UAAU,KAAK,OAAO,QAAQ,KAAK,GAAG;AAC1D,QAAM,YAAY,OAAO,OAAO,UAAU,OAAO,UAAU,IAAI;AAE/D,aAAW,CAAC,YAAY,OAAO,KAAK,OAAO,QAAQ,OAAO,QAAQ;AAEhE,UADc,IAAI,OAAO,OAAO,EACtB,KAAK,SAAS,GAAG;AAEzB,QAAK,OAAO,UAAU,MACpB,OAAO,UAAU,IAAI,CAAC,IAExB,OAAO,UAAU,EAAE,QAAQ,IAAI;AAC/B;AAAA,MACF;AAAA,EAEJ;AAEA,SAAO;AACT;AAKA,SAAS,aACP,OACA,QACK;AACL,MAAI,OAAO,SAAU;AACnB,WAAO;AAGT,MAAM,QAAQ,IAAI,OAAO,OAAO,OAAO,GACjC,QAAQ,MAAM,MAAM,KAAK;AAE/B,SAAK,QAOE,MAAM,OAAO,KAAK,IANnB,OAAO,YAAY,UACd,QAEF,OAAO,YAAY,SAAY,OAAO,UAAU;AAI3D;AAKA,SAAS,eACP,OACA,QACA,SACK;AACL,MAAI,OAAO,SAAU,YAAY,UAAU;AACzC,WAAO;AAGT,MAAM,SAAc,CAAC;AAErB,WAAW,CAAC,KAAK,GAAG,KAAK,OAAO,QAAQ,KAAK,GAAG;AAC9C,QAAI,mBAAmB;AAEvB,aAAW,QAAQ,OAAO;AACxB,yBAAmB,kBAAkB,kBAAkB,MAAM,OAAO;AAGtE,WAAO,GAAG,IAAI;AAAA,EAChB;AAEA,SAAO;AACT;AAKA,SAAS,iBACP,OACA,QACK;AACL,MAAI,OAAO,SAAU,YAAY,UAAU;AACzC,WAAO;AAGT,MAAM,SAAS,EAAE,GAAG,MAAM,GACpB,SAAc,CAAC;AAErB,WAAW,aAAa,OAAO;AAC7B,QAAI,aAAa,QAAQ;AACvB,UAAM,cAAc,OAAO,SAAS;AACpC,MAAI,OAAO,eAAgB,YAAY,gBAAgB,QACrD,OAAO,OAAO,QAAQ,WAAW,GAG/B,OAAO,WAAW,MACpB,OAAO,OAAO,SAAS;AAAA,IAE3B;AAGF,SAAI,OAAO,KAAK,MAAM,EAAE,SAAS,MAC/B,OAAO,OAAO,EAAE,IAAI,SAGf;AACT;AAKO,SAAS,iBAAiB,WAAoE;AACnG,MAAM,SAAmB,CAAC;AAE1B,MAAI,CAAC,UAAU,aAAa,OAAO,UAAU,aAAc;AACzD,kBAAO,KAAK,6BAA6B,GAClC,EAAE,OAAO,IAAO,OAAO;AAGhC,MAAM,SAAS,UAAU;AAMzB,OAJI,CAAC,OAAO,SAAS,OAAO,OAAO,SAAU,aAC3C,OAAO,KAAK,4CAA4C,GAGtD,CAAC,OAAO,cAAc,CAAC,MAAM,QAAQ,OAAO,UAAU;AACxD,kBAAO,KAAK,uCAAuC,GAC5C,EAAE,OAAO,OAAO,WAAW,GAAG,OAAO;AAG9C,EAAI,OAAO,WAAW,WAAW,KAC/B,OAAO,KAAK,uDAAuD;AAGrE,WAAS,IAAI,GAAG,IAAI,OAAO,WAAW,QAAQ,KAAK;AACjD,QAAM,OAAO,OAAO,WAAW,CAAC;AAEhC,QAAI,CAAC,QAAQ,OAAO,QAAS,UAAU;AACrC,aAAO,KAAK,wBAAwB,CAAC,qBAAqB;AAC1D;AAAA,IACF;AAEA,QAAM,WAAW,OAAO,KAAK,IAAI;AACjC,QAAI,SAAS,WAAW,GAAG;AACzB,aAAO,KAAK,wBAAwB,CAAC,mCAAmC;AACxE;AAAA,IACF;AAEA,QAAMC,aAAY,SAAS,CAAC,GACtB,WAAW;AAAA,M
ACf;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAUA,QARK,SAAS,SAASA,UAAS,KAC9B,OAAO;AAAA,MACL,wBAAwB,CAAC,4BAA4BA,UAAS,aACpD,SAAS,KAAK,IAAI,CAAC;AAAA,IAC/B,GAIEA,eAAc,WAAW;AAC3B,UAAM,eAAgB,KAAa;AACnC,MAAI,gBAAgB,OAAO,gBAAiB,YAAY,aAAa,SAE/D,OADU,aAAa,SACN,YACnB,OAAO,KAAK,wBAAwB,CAAC,mCAAmC;AAAA,IAG9E;AAEA,QAAIA,eAAc,kBAAkB;AAClC,UAAMC,UAAU,KAAa;AAC7B,UAAIA,YAAW,MAAQ,OAAOA,WAAW;AACvC,eAAO,KAAK,wBAAwB,CAAC,4CAA4C;AAAA,eACxE,OAAOA,WAAW,YAAYA,QAAO,SAAS;AACvD,YAAM,gBAAgB,CAAC,QAAQ,UAAU,SAAS;AAClD,QAAK,cAAc,SAASA,QAAO,OAAO,KACxC,OAAO;AAAA,UACL,wBAAwB,CAAC,4CAA4C,cAAc,KAAK,IAAI,CAAC;AAAA,QAC/F;AAAA,MAEJ;AAAA,IACF;AAEA,QAAID,eAAc,kBAAkB;AAClC,UAAMC,UAAU,KAAa;AAC7B,MAAI,CAACA,WAAU,OAAOA,WAAW,WAC/B,OAAO,KAAK,wBAAwB,CAAC,oCAAoC,IAC9D,WAAWA,WACtB,OAAO,KAAK,wBAAwB,CAAC,+CAA+C;AAAA,IAExF;AAEA,QAAID,eAAc,QAAQ;AACxB,UAAMC,UAAU,KAAa;AAC7B,UAAI,CAACA,WAAU,OAAOA,WAAW;AAC/B,eAAO,KAAK,wBAAwB,CAAC,0BAA0B;AAAA,WAC1D;AACL,YAAM,UAAU,UAAUA,SACpB,aAAa,aAAaA;AAGhC,YAAI,CAAC,WAAW,CAAC;AACf,iBAAO,KAAK,wBAAwB,CAAC,sDAAsD;AAAA,iBAClF,WAAW;AACpB,iBAAO,KAAK,wBAAwB,CAAC,yDAAyD;AAAA,iBACrF,SAAS;AAElB,cAAM,YAAY,CAAC,cAAc,aAAa,WAAW;AACzD,UAAK,UAAU,SAASA,QAAO,IAAI,KACjC,OAAO;AAAA,YACL,wBAAwB,CAAC,+BAA+B,UAAU,KAAK,IAAI,CAAC;AAAA,UAC9E;AAAA,QAEJ,MAAO,CAAI,eAEL,OAAOA,QAAO,WAAY,YAAYA,QAAO,YAAY,QAAQ,MAAM,QAAQA,QAAO,OAAO,IAC/F,OAAO,KAAK,wBAAwB,CAAC,iDAAiD,IAC7E,OAAO,KAAKA,QAAO,OAAO,EAAE,WAAW,KAChD,OAAO,KAAK,wBAAwB,CAAC,+CAA+C;AAAA,MAG1F;AAAA,IACF;AAEA,QAAID,eAAc,WAAW;AAC3B,UAAMC,UAAU,KAAa;AAC7B,UAAI,CAACA,WAAU,OAAOA,WAAW;AAC/B,eAAO,KAAK,wBAAwB,CAAC,6BAA6B;AAAA,eACzD,CAACA,QAAO;AACjB,eAAO,KAAK,wBAAwB,CAAC,uCAAuC;AAAA,WACvE;AACL,YAAM,aAAa,CAAC,QAAQ,SAAS,OAAO,OAAO;AACnD,QAAK,WAAW,SAASA,QAAO,IAAI,KAClC,OAAO;AAAA,UACL,wBAAwB,CAAC,kCAAkC,WAAW,KAAK,IAAI,CAAC;AAAA,QAClF,IAEGA,QAAO,SAAS,UAAUA,QAAO,SAAS,YAAYA,QAAO,cAAc,UAAa,OAAOA,QAAO,aAAc,YACvH,OAAO,KAAK,wBAAwB,CAAC,sCAAsC;AAAA,MAE/E;AAAA,IACF;AAEA,QAAID,eAAc,YAAY;AAC5B,UAAMC,UAAU,KAAa;AAC7B,MAAI,CAACA,WAAU,OAAOA,
WAAW,WAC/B,OAAO,KAAK,wBAAwB,CAAC,8BAA8B,KAE/D,OAAOA,QAAO,WAAY,YAC5B,OAAO,KAAK,wBAAwB,CAAC,qCAAqC,GAExE,OAAOA,QAAO,QAAS,YACzB,OAAO,KAAK,wBAAwB,CAAC,kCAAkC,GAErEA,QAAO,UAAU,UAAa,OAAOA,QAAO,SAAU,YACxD,OAAO,KAAK,wBAAwB,CAAC,mCAAmC;AAAA,IAG9E;AAEA,QAAID,eAAc,cAAc;AAC9B,UAAMC,UAAU,KAAa;AAC7B,MAAI,CAACA,WAAU,OAAOA,WAAW,WAC/B,OAAO,KAAK,wBAAwB,CAAC,gCAAgC,MAEjE,CAACA,QAAO,MAAM,CAAC,CAAC,SAAS,KAAK,EAAE,SAASA,QAAO,EAAE,MACpD,OAAO,KAAK,wBAAwB,CAAC,0CAA0C,GAE7E,CAACA,QAAO,YAAY,OAAOA,QAAO,YAAa,WACjD,OAAO,KAAK,wBAAwB,CAAC,yCAAyC,IACrE,OAAO,KAAKA,QAAO,QAAQ,EAAE,WAAW,KACjD,OAAO,KAAK,wBAAwB,CAAC,sDAAsD;AAAA,IAGjG;AAEA,QAAID,eAAc,YAAY;AAC5B,UAAMC,UAAU,KAAa;AAC7B,MAAI,CAACA,WAAU,OAAOA,WAAW,WAC/B,OAAO,KAAK,wBAAwB,CAAC,8BAA8B,KAE/D,OAAOA,QAAO,WAAY,YAC5B,OAAO,KAAK,wBAAwB,CAAC,qCAAqC,GAExE,OAAOA,QAAO,SAAU,YAC1B,OAAO,KAAK,wBAAwB,CAAC,mCAAmC;AAAA,IAG9E;AAEA,QAAID,eAAc,cAAc;AAC9B,UAAMC,UAAU,KAAa;AAC7B,MAAI,CAACA,WAAU,OAAOA,WAAW,WAC/B,OAAO,KAAK,wBAAwB,CAAC,gCAAgC,IAC3D,MAAM,QAAQA,QAAO,UAAU,IAEhCA,QAAO,WAAW,WAAW,KACtC,OAAO,KAAK,wBAAwB,CAAC,0DAA0D,IAF/F,OAAO,KAAK,wBAAwB,CAAC,0CAA0C;AAAA,IAInF;AAEA,QAAID,eAAc,gBAAgB;AAChC,UAAMC,UAAU,KAAa;AAC7B,MAAI,CAACA,WAAU,OAAOA,WAAW,WAC/B,OAAO,KAAK,wBAAwB,CAAC,kCAAkC,MAEnE,CAAC,MAAM,QAAQA,QAAO,IAAI,KAAKA,QAAO,KAAK,WAAW,MACxD,OAAO,KAAK,wBAAwB,CAAC,+CAA+C,GAElF,OAAOA,QAAO,MAAO,YACvB,OAAO,KAAK,wBAAwB,CAAC,oCAAoC;AAAA,IAG/E;AAAA,EACF;AAEA,SAAO;AAAA,IACL,OAAO,OAAO,WAAW;AAAA,IACzB;AAAA,EACF;AACF;;;ACxpBO,SAAS,YACd,UACA,WACK;AACL,MAAM,SAAS,EAAE,GAAG,SAAS,GACvB,EAAE,MAAM,IAAI,UAAU,IAAI,UAAU,OAGpC,cAAc,eAAe,QAAQ,IAAI;AAG/C,MAAI,gBAAgB;AAClB,WAAO;AAGT,MAAI,cAAc;AAGlB,SAAI,cACF,cAAc,eAAe,aAAa,SAAS,IAIrD,eAAe,QAAQ,IAAI,WAAW,GAE/B;AACT;AAKA,SAAS,eACP,OACA,WACK;AAEL,WAAW,EAAE,SAAS,OAAO,UAAU,KAAK,UAAU;AACpD,QAAI,aAAa,OAAO,OAAO;AAC7B,aAAO;AAKX,SAAO,UAAU,YAAY,SAAY,UAAU,UAAU;AAC/D;AAKO,SAAS,aAAa,WAAgE;AAC3F,MAAM,SAAmB,CAAC;AAE1B,MAAI,CAAC,UAAU,SAAS,OAAO,UAAU,SAAU;AACjD,kBAAO,KAAK,yBAAyB,GAC9B,EAAE,OAAO,IAAO,OAAO;AAGhC,MAAM,SAAS,UAAU;AAWzB,OATI,CAAC,OAAO,QAAQ,OAAO,OAAO,QAAS,aAC
zC,OAAO,KAAK,uCAAuC,IAGjD,CAAC,OAAO,MAAM,OAAO,OAAO,MAAO,aACrC,OAAO,KAAK,qCAAqC,GAI/C,OAAO,WAAW;AACpB,QAAI,OAAO,OAAO,aAAc;AAC9B,oBAAO,KAAK,mCAAmC,GACxC,EAAE,OAAO,OAAO,WAAW,GAAG,OAAO;AAG9C,QAAI,CAAC,OAAO,UAAU,SAAS,CAAC,MAAM,QAAQ,OAAO,UAAU,KAAK;AAClE,aAAO,KAAK,wCAAwC;AAAA,SAC/C;AACL,MAAI,OAAO,UAAU,MAAM,WAAW,KACpC,OAAO,KAAK,mDAAmD;AAGjE,eAAS,IAAI,GAAG,IAAI,OAAO,UAAU,MAAM,QAAQ,KAAK;AACtD,YAAM,WAAW,OAAO,UAAU,MAAM,CAAC;AAEzC,YAAI,CAAC,YAAY,OAAO,YAAa,UAAU;AAC7C,iBAAO,KAAK,yBAAyB,CAAC,qBAAqB;AAC3D;AAAA,QACF;AAEA,QAAM,aAAa,YACjB,OAAO,KAAK,yBAAyB,CAAC,+BAA+B,GAGjE,WAAW,YACf,OAAO,KAAK,yBAAyB,CAAC,6BAA6B;AAAA,MAEvE;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AAAA,IACL,OAAO,OAAO,WAAW;AAAA,IACzB;AAAA,EACF;AACF;;;ACtGO,SAAS,YACd,UACA,WACA,mBACK;AACL,MAAI,SAAS;AAGb,WAAW,iBAAiB,UAAU;AACpC,QAAI;AACF,eAAS,kBAAkB,QAAQ,eAAe,MAAM;AAAA,IAC1D,SAAS,OAAO;AACd,YAAM,IAAI;AAAA,QACR,oBAAoB,aAAa,aAC/B,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CACvD;AAAA,MACF;AAAA,IACF;AAGF,SAAO;AACT;AAOO,SAAS,aAAa,WAAgE;AAC3F,MAAM,SAAmB,CAAC;AAE1B,MAAI,CAAC,UAAU;AACb,kBAAO,KAAK,uBAAuB,GAC5B,EAAE,OAAO,IAAO,OAAO;AAGhC,MAAI,CAAC,MAAM,QAAQ,UAAU,KAAK;AAChC,kBAAO,KAAK,2CAA2C,GAChD,EAAE,OAAO,IAAO,OAAO;AAGhC,EAAI,UAAU,MAAM,WAAW,KAC7B,OAAO,KAAK,wCAAwC;AAGtD,WAAS,IAAI,GAAG,IAAI,UAAU,MAAM,QAAQ,KAAK;AAC/C,QAAM,gBAAgB,UAAU,MAAM,CAAC;AAEvC,IAAI,OAAO,iBAAkB,WAC3B,OAAO,KAAK,SAAS,CAAC,qCAAqC,KAClD,CAAC,iBAAiB,cAAc,KAAK,MAAM,OACpD,OAAO,KAAK,SAAS,CAAC,kCAAkC;AAAA,EAE5D;AAEA,SAAO;AAAA,IACL,OAAO,OAAO,WAAW;AAAA,IACzB;AAAA,EACF;AACF;;;ACzDO,SAAS,iBACd,UACA,UACA,SACA,mBACK;AAEL,MAAI,SAAS,UAAU,QAAQ;AAG/B,WAAW,aAAa;AACtB,aAAS,iBAAiB,QAAQ,WAAW,SAAS,iBAAiB;AAGzE,SAAO;AACT;AAKA,SAAS,iBACP,UACA,WACA,SACA,mBACK;AACL,MAAI,UAAU;AACZ,WAAO,WAAW,UAAU,WAAW,OAAO;AAGhD,MAAI,aAAa;AACf,WAAO,cAAc,UAAU,SAAS;AAG1C,MAAI,YAAY;AACd,WAAO,aAAa,UAAU,SAAS;AAGzC,MAAI,aAAa;AACf,WAAO,cAAc,UAAU,SAAS;AAG1C,MAAI,eAAe;AACjB,WAAO,gBAAgB,UAAU,WAAW,OAAO;AAGrD,MAAI,WAAW;AACb,WAAO,YAAY,UAAU,SAAS;AAGxC,MAAI,WAAW,WAAW;AACxB,QAAI,CAAC;AACH,YAAM,IAAI,MAAM,4DAA4D;AAE9E,WAAO,YAAY,UAAU,WAAW,iBAAiB;AAAA,EAC
3D;AAGA,SAAO;AACT;AAOO,SAAS,oBAAoB,UAAyC;AAC3E,MAAM,SAAmB,CAAC;AAE1B,MAAI,CAAC,MAAM,QAAQ,QAAQ;AACzB,WAAO;AAAA,MACL,OAAO;AAAA,MACP,QAAQ,CAAC,+BAA+B;AAAA,IAC1C;AAGF,MAAI,SAAS,WAAW;AACtB,WAAO;AAAA,MACL,OAAO;AAAA,MACP,QAAQ,CAAC,+CAA+C;AAAA,IAC1D;AAGF,WAAS,IAAI,GAAG,IAAI,SAAS,QAAQ,KAAK;AACxC,QAAM,YAAY,SAAS,CAAC;AAE5B,QAAI,CAAC,aAAa,OAAO,aAAc,UAAU;AAC/C,aAAO,KAAK,sBAAsB,CAAC,oBAAoB;AACvD;AAAA,IACF;AAGA,QAAM,gBAAgB,OAAO,KAAK,SAAS,GACrC,kBAAkB,CAAC,QAAQ,WAAW,UAAU,WAAW,aAAa,SAAS,OAAO,GACxF,eAAe,cAAc,KAAK,SAAO,gBAAgB,SAAS,GAAG,CAAC;AAE5E,QAAI,CAAC,cAAc;AACjB,aAAO;AAAA,QACL,sBAAsB,CAAC,sBAAsB,gBAAgB,KAAK,IAAI,CAAC;AAAA,MACzE;AACA;AAAA,IACF;AAEA,QAAI,cAAc,SAAS,GAAG;AAC5B,aAAO;AAAA,QACL,sBAAsB,CAAC,4CAA4C,cAAc,KAAK,IAAI,CAAC;AAAA,MAC7F;AACA;AAAA,IACF;AAGA,QAAM,aAAa,kBAAkB,SAAS;AAC9C,IAAK,WAAW,SACd,OAAO;AAAA,MACL,sBAAsB,CAAC,KAAK,YAAY,MAAM,WAAW,OAAO,KAAK,IAAI,CAAC;AAAA,IAC5E;AAAA,EAEJ;AAEA,SAAO;AAAA,IACL,OAAO,OAAO,WAAW;AAAA,IACzB;AAAA,EACF;AACF;AAKA,SAAS,kBAAkB,WAAwC;AACjE,SAAI,UAAU,YACL,YAAY,SAAS,IAG1B,aAAa,YACR,eAAe,SAAS,IAG7B,YAAY,YACP,cAAc,SAAS,IAG5B,aAAa,YACR,eAAe,SAAS,IAG7B,eAAe,YACV,iBAAiB,SAAS,IAG/B,WAAW,YACN,aAAa,SAAS,IAG3B,WAAW,YACN,aAAa,SAAS,IAGxB;AAAA,IACL,OAAO;AAAA,IACP,QAAQ,CAAC,wBAAwB;AAAA,EACnC;AACF;AASO,SAAS,iBAAiB,UAAyE;AACxG,MAAM,YAAyB,CAAC,GAC1B,UAAuB,CAAC;AAE9B,WAAW,MAAM;AACf,IAAI,MAAM,OAAO,MAAO,YAAY,WAAW,KAC7C,QAAQ,KAAK,EAAE,IAEf,UAAU,KAAK,EAAE;AAIrB,SAAO,EAAE,WAAW,QAAQ;AAC9B;AAKO,SAAS,iBAAiB,SAKlB;AACb,SAAO;AAAA,IACL,UAAU,QAAQ;AAAA,IAClB,SAAS,QAAQ;AAAA,IACjB,MAAM,QAAQ;AAAA,IACd,KAAK,QAAQ;AAAA,EACf;AACF;;;ACpNA,OAAOC,WAAU;AACjB,YAAY,UAAU;;;ACnBtB,OAAOC,WAAU;AAeV,SAAS,sBACd,SACA,SACkB;AAClB,MAAM,mBAAmB,2CACnB,QAAQ,QAAQ,MAAM,gBAAgB;AAE5C,MAAI,CAAC;AACH,WAAO,EAAE,MAAM,QAAQ;AAGzB,MAAM,CAAC,EAAE,gBAAgB,IAAI,IAAI;AAEjC,MAAI;AAEF,WAAO,EAAE,aADYA,MAAK,KAAK,cAAc,KAAK,CAAC,GAC7B,KAAK;AAAA,EAC7B,SAAS,OAAO;AACd,QAAI,SAAS;AACX,aAAO,EAAE,MAAM,QAAQ;AAEzB,UAAM;AAAA,EACR;AACF;AAOO,SAAS,0BAA0B,SAAsB;AAC9D,MAAI,OAAO,WAAY;AACrB,WAAO;AAGT,MAAM,OAAO,OAAO,SAAS,QAAS,WAAW,QAAQ,OAAO;
AAGhE,SAFuB,WAAW,OAAO,WAAY,YAAY,iBAAiB,WAAW,QAAQ,cAc9F;AAAA,EARiBA,MAAK,KAAK,QAAQ,aAAa;AAAA,IACrD,QAAQ;AAAA,IACR,WAAW;AAAA,IACX,WAAW;AAAA,IACX,QAAQ;AAAA,EACV,CAAC,CAG6B;AAAA;AAAA,EAAU,IAAI,KAXnC;AAYX;;;AD9BO,IAAM,oBAAN,MAAwB;AAAA,EAAxB;AACL,SAAQ,aAAa,oBAAI,IAAuB;AAAA;AAAA;AAAA;AAAA;AAAA,EAKhD,SAAS,WAA4B;AACnC,SAAK,WAAW,IAAI,UAAU,MAAM,SAAS;AAAA,EAC/C;AAAA;AAAA;AAAA;AAAA,EAKA,IAAI,MAAqC;AACvC,WAAO,KAAK,WAAW,IAAI,IAAI;AAAA,EACjC;AAAA;AAAA;AAAA;AAAA,EAKA,IAAI,MAAuB;AACzB,WAAO,KAAK,WAAW,IAAI,IAAI;AAAA,EACjC;AAAA;AAAA;AAAA;AAAA,EAKA,QAAQ,MAAc,OAAY,SAAoB;AACpD,QAAM,YAAY,KAAK,IAAI,IAAI;AAC/B,QAAI,CAAC;AACH,YAAM,IAAI,MAAM,wBAAwB,IAAI,EAAE;AAIhD,QAAI,UAAU,YAAY,CAAC,UAAU,SAAS,OAAO;AACnD,YAAM,IAAI,MAAM,kCAAkC,IAAI,EAAE;AAG1D,WAAO,UAAU,QAAQ,OAAO,OAAO;AAAA,EACzC;AAAA;AAAA;AAAA;AAAA,EAKA,OAAiB;AACf,WAAO,MAAM,KAAK,KAAK,WAAW,KAAK,CAAC;AAAA,EAC1C;AACF,GASa,iBAA4B;AAAA,EACvC,MAAM;AAAA,EACN,QAAQ,OAAoB;AAC1B,QAAI,OAAO,SAAU;AACnB,aAAO;AAIT,QAAM,WAAW,MAEd,QAAQ,aAAa,EAAE,EAEvB,QAAQ,qBAAqB,EAAE,EAE/B,QAAQ,gBAAgB,IAAI;AAE/B,WAAO,KAAK,MAAM,QAAQ;AAAA,EAC5B;AACF,GAWa,gBAA2B;AAAA,EACtC,MAAM;AAAA,EACN,QAAQ,OAAY,SAAsD;AAGxE,YAFkB,SAAS,aAAa,aAEtB,UACZ,OAAO,SAAU,WACZ,QAEFC,MAAK,KAAK,KAAK,IAGfA,MAAK,KAAK,OAAO;AAAA,MACtB,QAAQ;AAAA,MACR,WAAW;AAAA;AAAA,MACX,WAAW;AAAA;AAAA,MACX,QAAQ;AAAA;AAAA,IACV,CAAC;AAAA,EAEL;AACF,GAaa,gBAA2B;AAAA,EACtC,MAAM;AAAA,EACN,QAAQ,OAAY,SAAsD;AAGxE,SAFkB,SAAS,aAAa,aAEtB,SAAS;AACzB,UAAI,OAAO,SAAU;AACnB,eAAO;AAET,UAAI;AACF,eAAY,WAAM,KAAK;AAAA,MACzB,SAAS,OAAO;AACd,cAAM,IAAI,MAAM,qBAAqB,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC,EAAE;AAAA,MAC/F;AAAA,IACF;AAEE,UAAI;AACF,eAAY,eAAU,KAAK;AAAA,MAC7B,SAAS,OAAO;AACd,cAAM,IAAI,MAAM,yBAAyB,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC,EAAE;AAAA,MACnG;AAAA,EAEJ;AACF,GAKa,sBAAiC;AAAA,EAC5C,MAAM;AAAA,EACN,QAAQ,OAAoB;AAC1B,WAAO,cAAc,QAAQ,OAAO,EAAE,WAAW,YAAY,CAAC;AAAA,EAChE;AACF,GAKa,sBAAiC;AAAA,EAC5C,MAAM;AAAA,EACN,QAAQ,OAAoB;AAC1B,WAAO,cAAc,QAAQ,OAAO,EAAE,WAAW,QAAQ,CAAC;AAAA,EAC5D;AACF,GAKa,sBAAiC;AAAA,EAC5C,MAAM;AAAA,EACN,QAAQ,OAAoB;AAC1B,WAAO,cAAc,QAAQ,OA
AO,EAAE,WAAW,YAAY,CAAC;AAAA,EAChE;AACF,GAKa,sBAAiC;AAAA,EAC5C,MAAM;AAAA,EACN,QAAQ,OAAoB;AAC1B,WAAO,cAAc,QAAQ,OAAO,EAAE,WAAW,QAAQ,CAAC;AAAA,EAC5D;AACF,GASa,0BAAqC;AAAA,EAChD,MAAM;AAAA,EACN,QAAQ,OAAiB;AACvB,WAAI,OAAO,SAAU,WACZ,QAGF,MACJ,QAAQ,aAAa,EAAE,EACvB,QAAQ,qBAAqB,EAAE,EAC/B,QAAQ,cAAc,EAAE;AAAA,EAC7B;AACF,GAKa,uBAAkC;AAAA,EAC7C,MAAM;AAAA,EACN,QAAQ,OAAY,SAAwC;AAC1D,QAAM,YAAY,SAAS,aAAa;AAExC,QAAI,MAAM,QAAQ,KAAK;AAarB,aAZiB,MACd,OAAO,UACF,WAAS,MAAO,MAAM,QAAQ,IAAI,KAAK,KAAK,WAAW,KAGvD,OAAO,QAAS,YAAY,SAAS,QAAQ,OAAO,KAAK,IAAI,EAAE,WAAW,EAI/E,EACA,IAAI,UAAQ,aAAa,OAAO,QAAS,WAAW,qBAAqB,QAAQ,MAAM,OAAO,IAAI,IAAI;AAK3G,QAAI,OAAO,SAAU,YAAY,UAAU,MAAM;AAC/C,UAAM,WAAgB,CAAC;AACvB,eAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,KAAK;AAC7C,QAAI,UAAU,MAAO,MAAM,QAAQ,KAAK,KAAK,MAAM,WAAW,KAG1D,OAAO,SAAU,YAAY,UAAU,QAAQ,OAAO,KAAK,KAAK,EAAE,WAAW,MAGjF,SAAS,GAAG,IAAI,aAAa,OAAO,SAAU,WAAW,qBAAqB,QAAQ,OAAO,OAAO,IAAI;AAE1G,aAAO;AAAA,IACT;AAEA,WAAO;AAAA,EACT;AACF,GAKa,sBAAiC;AAAA,EAC5C,MAAM;AAAA,EACN,QAAQ,OAAY,SAAwC;AAC1D,QAAM,YAAY,SAAS,aAAa;AAExC,QAAI,MAAM,QAAQ,KAAK;AACrB,aAAO,MACJ,OAAO,UAAQ,QAAS,IAA0B,EAClD,IAAI,UAAQ,aAAa,OAAO,QAAS,WAAW,oBAAoB,QAAQ,MAAM,OAAO,IAAI,IAAI;AAG1G,QAAI,OAAO,SAAU,YAAY,UAAU,MAAM;AAC/C,UAAM,WAAgB,CAAC;AACvB,eAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,KAAK;AAC7C,QAAI,SAAU,SAGd,SAAS,GAAG,IAAI,aAAa,OAAO,SAAU,WAAW,oBAAoB,QAAQ,OAAO,OAAO,IAAI;AAEzG,aAAO;AAAA,IACT;AAEA,WAAO;AAAA,EACT;AACF,GASa,oBAA+B;AAAA,EAC1C,MAAM;AAAA,EACN,QAAQ,OAAe,SAAsD;AAC3E,QAAI,OAAO,SAAU;AACnB,aAAO,EAAE,SAAS,OAAO,KAAK,EAAE;AAGlC,QAAM,QAAQ,SAAS,SAAS,GAC1B,cAAc,IAAI,OAAO,MAAM,KAAK,cAAc,IAAI,GACtD,WAAmC,CAAC,GAEtC,YAAY,GACZ,YAAY,aACZ;AAEJ,YAAQ,QAAQ,YAAY,KAAK,KAAK,OAAO;AAE3C,MAAI,YAAY,MAAM,UACpB,SAAS,SAAS,IAAI,MAAM,MAAM,WAAW,MAAM,KAAK,EAAE,KAAK,IAIjE,YAAY,MAAM,CAAC,EAAE,KAAK,GAC1B,YAAY,MAAM,QAAQ,MAAM,CAAC,EAAE;AAIrC,WAAI,YAAY,MAAM,WACpB,SAAS,SAAS,IAAI,MAAM,MAAM,SAAS,EAAE,KAAK,IAG7C;AAAA,EACT;AACF,GAiBa,oBAA+B;AAAA,EAC1C,MAAM;AAAA,EACN,QAAQ,OAAY,SAAsD;AAExE,YADkB,SAAS,aAAa,aACtB,UACZ,OAAO,SAAU,WACZ,QAEF,sBAAsB,KAAK,IAE7B,0BAA0
B,KAAK;AAAA,EACxC;AACF,GAKa,0BAAqC;AAAA,EAChD,MAAM;AAAA,EACN,QAAQ,OAAoB;AAC1B,WAAO,kBAAkB,QAAQ,OAAO,EAAE,WAAW,QAAQ,CAAC;AAAA,EAChE;AACF,GAKa,0BAAqC;AAAA,EAChD,MAAM;AAAA,EACN,QAAQ,OAAoB;AAC1B,WAAO,kBAAkB,QAAQ,OAAO,EAAE,WAAW,YAAY,CAAC;AAAA,EACpE;AACF,GAKa,uBAAkC;AAAA,EAC7C,MAAM;AAAA,EACN,QAAQ,OAAoB;AAC1B,QAAI,OAAO,SAAU;AACnB,aAAO,CAAC;AAGV,QAAM,QAAQ,MAAM,MAAM,+BAA+B;AACzD,QAAI,CAAC;AACH,aAAO,CAAC;AAGV,QAAI;AACF,aAAOA,MAAK,KAAK,MAAM,CAAC,CAAC,KAAK,CAAC;AAAA,IACjC,SAAS,OAAO;AACd,oBAAO,KAAK,gCAAgC,KAAK,EAAE,GAC5C,CAAC;AAAA,IACV;AAAA,EACF;AACF,GAKa,gBAA2B;AAAA,EACtC,MAAM;AAAA,EACN,QAAQ,OAAuB;AAC7B,QAAI,OAAO,SAAU;AACnB,aAAO,OAAO,KAAK;AAGrB,QAAM,QAAQ,MAAM,MAAM,uCAAuC;AACjE,WAAI,QACK,MAAM,CAAC,EAAE,KAAK,IAGhB,MAAM,KAAK;AAAA,EACpB;AACF,GAUa,kBAA6B;AAAA,EACxC,MAAM;AAAA,EACN,QAAQ,OAAoB;AAC1B,QAAM,MAAM,OAAO,KAAK;AACxB,QAAI,MAAM,GAAG;AACX,YAAM,IAAI,MAAM,6BAA6B,KAAK,EAAE;AAEtD,WAAO;AAAA,EACT;AACF,GAEa,kBAA6B;AAAA,EACxC,MAAM;AAAA,EACN,QAAQ,OAAoB;AAC1B,WAAO,OAAO,KAAK;AAAA,EACrB;AACF,GAEa,mBAA8B;AAAA,EACzC,MAAM;AAAA,EACN,QAAQ,OAAqB;AAC3B,QAAI,OAAO,SAAU,UAAW,QAAO;AACvC,QAAI,OAAO,SAAU,UAAU;AAC7B,UAAM,QAAQ,MAAM,YAAY;AAChC,UAAI,UAAU,UAAU,UAAU,OAAO,UAAU,MAAO,QAAO;AACjE,UAAI,UAAU,WAAW,UAAU,OAAO,UAAU,KAAM,QAAO;AAAA,IACnE;AACA,WAAO,EAAQ;AAAA,EACjB;AACF,GAEa,gBAA2B;AAAA,EACtC,MAAM;AAAA,EACN,QAAQ,OAAiB;AACvB,WAAI,OAAO,SAAU,WACZ,KAAK,MAAM,KAAK,IAElB;AAAA,EACT;AACF,GAEa,gBAA2B;AAAA,EACtC,MAAM;AAAA,EACN,QAAQ,OAAkB;AACxB,QAAM,OAAO,IAAI,KAAK,KAAK;AAC3B,QAAI,MAAM,KAAK,QAAQ,CAAC;AACtB,YAAM,IAAI,MAAM,2BAA2B,KAAK,EAAE;AAEpD,WAAO;AAAA,EACT;AACF,GAMa,qBAAgC;AAAA,EAC3C,MAAM;AAAA,EACN,QAAQ,OAAoB;AAC1B,WAAO,OAAO,KAAK,EAAE,YAAY;AAAA,EACnC;AACF,GAEa,qBAAgC;AAAA,EAC3C,MAAM;AAAA,EACN,QAAQ,OAAoB;AAC1B,WAAO,OAAO,KAAK,EAAE,YAAY;AAAA,EACnC;AACF,GAEa,gBAA2B;AAAA,EACtC,MAAM;AAAA,EACN,QAAQ,OAAoB;AAC1B,WAAO,OAAO,KAAK,EAAE,KAAK;AAAA,EAC5B;AACF,GAEa,qBAAgC;AAAA,EAC3C,MAAM;AAAA,EACN,QAAQ,OAAoB;AAC1B,WAAO,OAAO,KAAK,EAChB,YAAY,EACZ,MAAM,KAAK,EACX,IAAI,UAAQ,KAAK,OAAO,CAAC,EAAE,YAAY,IAAI,KAAK,MAAM,CAAC,CAAC,EACxD,KAAK,GAAG;AA
AA,EACb;AACF,GAEa,qBAAgC;AAAA,EAC3C,MAAM;AAAA,EACN,QAAQ,OAAoB;AAC1B,WAAO,OAAO,KAAK,EAChB,QAAQ,gBAAgB,CAAC,GAAG,MAAM,IAAI,EAAE,YAAY,IAAI,EAAE,EAC1D,QAAQ,QAAQ,CAAC,GAAG,MAAM,EAAE,YAAY,CAAC;AAAA,EAC9C;AACF,GAEa,qBAAgC;AAAA,EAC3C,MAAM;AAAA,EACN,QAAQ,OAAoB;AAC1B,WAAO,OAAO,KAAK,EAChB,QAAQ,mBAAmB,OAAO,EAClC,QAAQ,WAAW,GAAG,EACtB,YAAY;AAAA,EACjB;AACF,GAEa,qBAAgC;AAAA,EAC3C,MAAM;AAAA,EACN,QAAQ,OAAoB;AAC1B,WAAO,OAAO,KAAK,EAChB,QAAQ,mBAAmB,OAAO,EAClC,QAAQ,WAAW,GAAG,EACtB,YAAY;AAAA,EACjB;AACF,GAEa,mBAA8B;AAAA,EACzC,MAAM;AAAA,EACN,QAAQ,OAAoB;AAC1B,WAAO,OAAO,KAAK,EAChB,YAAY,EACZ,QAAQ,aAAa,EAAE,EACvB,QAAQ,WAAW,GAAG,EACtB,QAAQ,YAAY,EAAE;AAAA,EAC3B;AACF,GAMa,uBAAkC;AAAA,EAC7C,MAAM;AAAA,EACN,QAAQ,OAAY,SAAiC;AAEnD,WAAO,CAAC,GADI,MAAM,QAAQ,KAAK,IAAI,QAAQ,CAAC,KAAK,GACjC,SAAS,KAAK;AAAA,EAChC;AACF,GAEa,uBAAkC;AAAA,EAC7C,MAAM;AAAA,EACN,QAAQ,OAAmB;AACzB,WAAK,MAAM,QAAQ,KAAK,IAGjB,CAAC,GAAG,IAAI,IAAI,KAAK,CAAC,IAFhB,CAAC,KAAK;AAAA,EAGjB;AACF,GAEa,wBAAmC;AAAA,EAC9C,MAAM;AAAA,EACN,QAAQ,OAAY,SAAqC;AACvD,QAAI,CAAC,MAAM,QAAQ,KAAK;AACtB,aAAO,CAAC,KAAK;AAEf,QAAM,QAAQ,SAAS,SAAS;AAChC,WAAO,MAAM,KAAK,KAAK;AAAA,EACzB;AACF,GAMa,mBAA8B;AAAA,EACzC,MAAM;AAAA,EACN,QAAQ,OAAY,SAAuD;AACzE,QAAI,OAAO,SAAU,YAAY,UAAU;AACzC,aAAO,EAAE,OAAO,MAAM;AAGxB,QAAM,YAAY,SAAS,aAAa,KAClC,SAA8B,CAAC;AAErC,aAAS,QAAQ,KAAU,SAAS,IAAI;AACtC,eAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,GAAG,GAAG;AAC9C,YAAM,SAAS,SAAS,GAAG,MAAM,GAAG,SAAS,GAAG,GAAG,KAAK;AAExD,QAAI,OAAO,SAAU,YAAY,UAAU,QAAQ,CAAC,MAAM,QAAQ,KAAK,IACrE,QAAQ,OAAO,MAAM,IAErB,OAAO,MAAM,IAAI;AAAA,MAErB;AAAA,IACF;AAEA,mBAAQ,KAAK,GACN;AAAA,EACT;AACF,GAEa,qBAAgC;AAAA,EAC3C,MAAM;AAAA,EACN,QAAQ,OAAY,SAAuC;AACzD,QAAI,OAAO,SAAU,YAAY,UAAU;AACzC,aAAO;AAGT,QAAM,YAAY,SAAS,aAAa,KAClC,SAAc,CAAC;AAErB,aAAW,CAACC,OAAM,KAAK,KAAK,OAAO,QAAQ,KAAK,GAAG;AACjD,UAAM,OAAOA,MAAK,MAAM,SAAS,GAC7B,UAAU;AAEd,eAAS,IAAI,GAAG,IAAI,KAAK,SAAS,GAAG,KAAK;AACxC,YAAM,MAAM,KAAK,CAAC;AAClB,QAAM,OAAO,YACX,QAAQ,GAAG,IAAI,CAAC,IAElB,UAAU,QAAQ,GAAG;AAAA,MACvB;AAEA,cAAQ,KAAK,KAAK,SAAS,CAAC,CAAC,IAAI;AAAA,IACnC;AAEA,WAAO;AAAA,EACT;AACF
,GAEa,oBAA+B;AAAA,EAC1C,MAAM;AAAA,EACN,QAAQ,OAAY,SAAmC;AACrD,QAAI,OAAO,SAAU,YAAY,UAAU;AACzC,aAAO;AAGT,QAAM,OAAO,SAAS,QAAQ,CAAC,GACzB,SAAc,CAAC;AAErB,aAAW,OAAO;AAChB,MAAI,OAAO,UACT,OAAO,GAAG,IAAI,MAAM,GAAG;AAI3B,WAAO;AAAA,EACT;AAAA,EACA,SAAS,SAAuC;AAC9C,WAAO,MAAM,QAAQ,SAAS,IAAI;AAAA,EACpC;AACF,GAEa,oBAA+B;AAAA,EAC1C,MAAM;AAAA,EACN,QAAQ,OAAY,SAAmC;AACrD,QAAI,OAAO,SAAU,YAAY,UAAU;AACzC,aAAO;AAGT,QAAM,OAAO,SAAS,QAAQ,CAAC,GACzB,SAAc,EAAE,GAAG,MAAM;AAE/B,aAAW,OAAO;AAChB,aAAO,OAAO,GAAG;AAGnB,WAAO;AAAA,EACT;AAAA,EACA,SAAS,SAAuC;AAC9C,WAAO,MAAM,QAAQ,SAAS,IAAI;AAAA,EACpC;AACF,GAMa,oBAA+B;AAAA,EAC1C,MAAM;AAAA,EACN,QAAQ,OAAY,SAAwC;AAC1D,QAAI,OAAO,SAAU,YAAY,UAAU;AACzC,YAAM,IAAI,MAAM,4CAA4C;AAI9D,QAAM,WADW,SAAS,YAAY,CAAC,GACd,OAAO,SAAO,EAAE,OAAO,MAAM;AAEtD,QAAI,QAAQ,SAAS;AACnB,YAAM,IAAI,MAAM,6CAA6C,QAAQ,KAAK,IAAI,CAAC,EAAE;AAGnF,WAAO;AAAA,EACT;AACF;AASO,SAAS,iCAAoD;AAClE,MAAM,WAAW,IAAI,kBAAkB;AAGvC,kBAAS,SAAS,cAAc,GAChC,SAAS,SAAS,aAAa,GAC/B,SAAS,SAAS,aAAa,GAC/B,SAAS,SAAS,mBAAmB,GACrC,SAAS,SAAS,mBAAmB,GACrC,SAAS,SAAS,mBAAmB,GACrC,SAAS,SAAS,mBAAmB,GAGrC,SAAS,SAAS,uBAAuB,GACzC,SAAS,SAAS,oBAAoB,GACtC,SAAS,SAAS,mBAAmB,GAGrC,SAAS,SAAS,iBAAiB,GACnC,SAAS,SAAS,iBAAiB,GACnC,SAAS,SAAS,uBAAuB,GACzC,SAAS,SAAS,uBAAuB,GACzC,SAAS,SAAS,oBAAoB,GACtC,SAAS,SAAS,aAAa,GAG/B,SAAS,SAAS,eAAe,GACjC,SAAS,SAAS,eAAe,GACjC,SAAS,SAAS,gBAAgB,GAClC,SAAS,SAAS,aAAa,GAC/B,SAAS,SAAS,aAAa,GAG/B,SAAS,SAAS,kBAAkB,GACpC,SAAS,SAAS,kBAAkB,GACpC,SAAS,SAAS,aAAa,GAC/B,SAAS,SAAS,kBAAkB,GACpC,SAAS,SAAS,kBAAkB,GACpC,SAAS,SAAS,kBAAkB,GACpC,SAAS,SAAS,kBAAkB,GACpC,SAAS,SAAS,gBAAgB,GAGlC,SAAS,SAAS,oBAAoB,GACtC,SAAS,SAAS,oBAAoB,GACtC,SAAS,SAAS,qBAAqB,GAGvC,SAAS,SAAS,gBAAgB,GAClC,SAAS,SAAS,kBAAkB,GACpC,SAAS,SAAS,iBAAiB,GACnC,SAAS,SAAS,iBAAiB,GAGnC,SAAS,SAAS,iBAAiB,GAE5B;AACT;AAKO,IAAM,2BAA2B,+BAA+B;;;AXxxBvE,SAAS,YAAAC,WAAU,WAAAC,UAAS,WAAAC,gBAAe;;;AahB3C,SAAS,eAAkC;AAG3C,SAAS,mBAAmB,OAAuB;AACjD,SAAO,MAAM,QAAQ,OAAO,GAAG;AACjC;AAEA,SAAS,cAAc,OAAyB;AAE9C,SADmB,mBAAmB,KAAK,EACzB,MAAM,GAAG,EAAE,OAAO,OAAO;AAC7C;AASO,SAAS,mCACd,UACA,sBACQ;A
ACR,MAAM,eAAe,cAAc,QAAQ,GACrC,iBAAiB,cAAc,oBAAoB,GAEnD,aAAa,KAAK,IAAI,aAAa,QAAQ,eAAe,MAAM,GAClE,aAAa;AAEjB,WAAS,IAAI,YAAY,KAAK,GAAG,KAAK;AACpC,QAAM,aAAa,aAAa,MAAM,CAAC,CAAC,EAAE,KAAK,GAAG,GAC5C,eAAe,eAAe,MAAM,GAAG,CAAC,EAAE,KAAK,GAAG;AACxD,QAAI,eAAe,cAAc;AAC/B,mBAAa;AACb;AAAA,IACF;AAAA,EACF;AAEA,SAAO,eAAe,MAAM,UAAU,EAAE,KAAK,GAAG;AAClD;AAEA,SAAS,0CAA0C,UAAiC;AAGlF,SAFmB,mBAAmB,QAAQ,EACrB,MAAM,YAAY,IAC5B,CAAC,KAAK;AACvB;AAEA,SAAS,0BACP,sBACA,eACQ;AACR,MAAM,iBAAiB,cAAc,oBAAoB,GACnD,cAAc,eAAe,YAAY,aAAa;AAE5D,SAAI,gBAAgB,KACX,mBAAmB,oBAAoB,IAGzC,eAAe,MAAM,cAAc,CAAC,EAAE,KAAK,GAAG;AACvD;AAUO,SAAS,uCACd,sBACA,aACA,WACQ;AACR,MAAM,UAAU,mBAAmB,SAAS,EAAE,MAAM,IAAI,GAClD,SAAS,QAAQ,CAAC,EAAE,QAAQ,OAAO,EAAE,GACrC,WAAW,QAAQ,CAAC,KAAK,IAE3B,kBAAkB;AAEtB,MAAI,YAAY,SAAS,IAAI,GAAG;AAC9B,QAAM,YAAY,mBAAmB,WAAW,EAAE,MAAM,IAAI,GACtD,WAAW,UAAU,CAAC,EAAE,QAAQ,OAAO,EAAE,GACzC,aAAa,UAAU,CAAC,KAAK;AAEnC,QAAI,UAAU;AACZ,UAAM,mBAAmB,mBAAmB,oBAAoB;AAChE,wBAAkB,iBAAiB,WAAW,WAAW,GAAG,IACxD,iBAAiB,MAAM,SAAS,SAAS,CAAC,IAC1C;AAAA,IACN,WAAW,QAAQ;AACjB,UAAM,iBAAiB,cAAc,MAAM,GACrC,gBAAgB,eAAe,eAAe,SAAS,CAAC;AAE9D,MAAI,kBACF,kBAAkB,0BAA0B,sBAAsB,aAAa;AAAA,IAEnF;AAGA,QAAI,cAAc,UAAU;AAC1B,UAAM,UAAU,WAAW,QAAQ,WAAW,EAAE,GAC1C,QAAQ,SAAS,QAAQ,WAAW,EAAE;AAC5C,MAAI,WAAW,SAAS,YAAY,UAClC,kBAAkB,gBAAgB;AAAA,QAChC,IAAI,OAAO,QAAQ,QAAQ,KAAK,KAAK,IAAI,GAAG;AAAA,QAC5C;AAAA,MACF;AAAA,IAEJ;AAAA,EACF,OAAO;AAGL,sBAAkB,SACd,mCAAmC,QAAQ,oBAAoB,IAC/D,mBAAmB,oBAAoB;AAE3C,QAAM,QAAQ,0CAA0C,QAAQ;AAChE,QAAI,SAAS,MAAM,WAAW,GAAG,GAAG;AAClC,UAAM,aAAa,QAAQ,eAAe;AAC1C,MAAI,cAAc,eAAe,UAC/B,kBAAkB,gBAAgB,MAAM,GAAG,CAAC,WAAW,MAAM,IAAI;AAAA,IAErE;AAAA,EACF;AAIA,2BAAkB,gCAAgC,eAAe,GAEjD,mBAAT,SAA4B,GAAG,MAAM,IAAI,eAAe,KAAyB,eAAvB;AACnE;;;Ab5CO,SAAS,mBACd,OACA,WAC6B;AAE7B,MAAI,MAAM,eAAe;AACvB,WAAO;AAAA,MACL,YAAY,MAAM;AAAA,MAClB,gBAAgB,MAAM;AAAA,MACtB,aAAa,MAAM,MAAM,IAAI,WAAS;AAAA,QACpC,UAAU;AAAA,QACV,WAAW;AAAA,QACX,SAAS;AAAA,QACT,aAAa;AAAA,MACf,EAAE;AAAA,MACF,SAAS;AAAA,MACT,gBAAgB,MAAM,MAAM;AAAA,MAC5B,gBAAgB,MAAM,MAAM;AAAA,MAC5B,aAAa;AAAA,IACf
;AAIF,MAAI,MAAM,eAAe;AACvB,kBAAO,KAAK,0CAA0C,GAC/C;AAAA,MACL,YAAY,MAAM;AAAA,MAClB,gBAAgB,CAAC;AAAA,MACjB,aAAa,MAAM,MAAM,IAAI,WAAS;AAAA,QACpC,UAAU;AAAA,QACV,SAAS;AAAA,QACT,OAAO,IAAI,MAAM,gDAAgD;AAAA,QACjE,aAAa;AAAA,MACf,EAAE;AAAA,MACF,SAAS;AAAA,MACT,gBAAgB,MAAM,MAAM;AAAA,MAC5B,gBAAgB;AAAA,MAChB,aAAa,MAAM,MAAM;AAAA,IAC3B;AAIF,MAAM,WAAW,sBAAsB,MAAM,YAAY,SAAS;AAClE,MAAI,CAAC;AACH,kBAAO,MAAM,kCAAkC,MAAM,UAAU,EAAE,GAC1D;AAAA,MACL,YAAY,MAAM;AAAA,MAClB,gBAAgB,CAAC;AAAA,MACjB,aAAa,MAAM,MAAM,IAAI,WAAS;AAAA,QACpC,UAAU;AAAA,QACV,SAAS;AAAA,QACT,OAAO,IAAI,MAAM,kCAAkC,MAAM,UAAU,EAAE;AAAA,QACrE,aAAa;AAAA,MACf,EAAE;AAAA,MACF,SAAS;AAAA,MACT,gBAAgB,MAAM,MAAM;AAAA,MAC5B,gBAAgB;AAAA,MAChB,aAAa,MAAM,MAAM;AAAA,IAC3B;AAGF,MAAM,cAAc,SAAS,UAAU,CAAC;AACxC,MAAI,YAAY,WAAW;AACzB,kBAAO,KAAK,yCAAyC,MAAM,UAAU,EAAE,GAEhE;AAAA,MACL,YAAY,MAAM;AAAA,MAClB,gBAAgB,MAAM;AAAA,MACtB,aAAa,MAAM,MAAM,IAAI,WAAS;AAAA,QACpC,UAAU;AAAA,QACV,WAAW;AAAA,QACX,SAAS;AAAA,QACT,aAAa;AAAA,MACf,EAAE;AAAA,MACF,SAAS;AAAA,MACT,gBAAgB,MAAM,MAAM;AAAA,MAC5B,gBAAgB,MAAM,MAAM;AAAA,MAC5B,aAAa;AAAA,IACf;AAIF,MAAM,cAAsC,CAAC,GACvC,iBAAgC,CAAC;AAEvC,WAAW,QAAQ,MAAM,OAAO;AAC9B,QAAM,SAAS,kBAAkB,MAAM,aAAa,MAAM,UAAU;AACpE,gBAAY,KAAK,MAAM,GAEnB,OAAO,WAAW,OAAO,aAC3B,eAAe,KAAK,OAAO,SAAS;AAAA,EAExC;AAEA,MAAM,iBAAiB,YAAY,OAAO,OAAK,EAAE,OAAO,EAAE,QACpD,cAAc,YAAY,OAAO,OAAK,CAAC,EAAE,OAAO,EAAE;AAExD,SAAO;AAAA,IACL,YAAY,MAAM;AAAA,IAClB;AAAA,IACA;AAAA,IACA,SAAS,gBAAgB;AAAA,IACzB,gBAAgB,MAAM,MAAM;AAAA,IAC5B;AAAA,IACA;AAAA,EACF;AACF;AAYO,SAAS,kBACd,MACA,OACA,YACsB;AAEtB,MAAM,cAAc,iBAAiB,KAAK,MAAM,KAAK;AAErD,MAAI,CAAC,aAAa;AAIhB,QAAM,aAAa,2BAA2B,MAAM,OAAO,UAAU;AACrE,QAAI;AACF,UAAI;AACF,YAAM,YAAY,gBAAgB,MAAM,YAAY,UAAU;AAC9D,eAAO;AAAA,UACL,UAAU;AAAA,UACV;AAAA,UACA,SAAS;AAAA,UACT,aAAa;AAAA,UACb,aAAa;AAAA,QACf;AAAA,MACF,SAAS,OAAO;AACd,sBAAO,MAAM,6CAA6C,KAAK,IAAI,IAAI,KAAK,GACrE;AAAA,UACL,UAAU;AAAA,UACV,SAAS;AAAA,UACT,OAAO,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC;AAAA,UAC/D,aAAa;AAAA,UACb,aAAa;AAAA,QACf;AAAA,MACF;AAIF,WAAO;AAAA,MACL,UAAU;AAAA,MACV,WAAW;AAAA,M
ACX,SAAS;AAAA,MACT,aAAa;AAAA,IACf;AAAA,EACF;AAEA,MAAI;AAEF,QAAM,YAAY,gBAAgB,MAAM,aAAa,UAAU;AAE/D,WAAO;AAAA,MACL,UAAU;AAAA,MACV;AAAA,MACA,SAAS;AAAA,MACT,aAAa;AAAA,MACb,aAAa;AAAA,IACf;AAAA,EACF,SAAS,OAAO;AACd,kBAAO,MAAM,2BAA2B,KAAK,IAAI,IAAI,KAAK,GACnD;AAAA,MACL,UAAU;AAAA,MACV,SAAS;AAAA,MACT,OAAO,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC;AAAA,MAC/D,aAAa;AAAA,MACb,aAAa;AAAA,IACf;AAAA,EACF;AACF;AAYA,SAAS,iBAAiB,UAAkB,OAA4B;AACtE,WAAW,QAAQ,OAAO;AAGxB,QAAI,KAAK;AACP;AAGF,QAAM,UAAU,mBAAmB,MAAM,MAAM;AAE/C,QAAK,WAKD,UAAU,UAAU,OAAO;AAC7B,aAAO;AAAA,EAEX;AAEA,SAAO;AACT;AAQA,SAAS,2BACP,MACA,OACA,YACa;AAEb,MAAI,cAAc,KAAK;AAMvB,MALI,CAAC,eAAe,KAAK,YAEvB,cADe,iBAAiB,KAAK,OAAO,EACvB,eAAe,CAAC,IAGnC,CAAC,eAAe,OAAO,KAAK,WAAW,EAAE,WAAW;AACtD,WAAO;AAGT,MAAI,OAA6C;AAEjD,WAAW,QAAQ,OAAO;AACxB,QAAM,SAAS,eAAe,iBAAiB,MAAM,MAAM;AAC3D,QAAI,CAAC,OAAQ;AAEb,QAAM,QAAQ,mBAAmB,aAAa,QAAQ,MAAM,KAAK,MAAM,UAAU;AAGjF,IAAI,MAAM,SAAS,QAEf,CAAC,QAAQ,MAAM,QAAQ,KAAK,WAC9B,OAAO,EAAE,MAAM,OAAO,MAAM,MAAM;AAAA,EAEtC;AAEA,SAAO,MAAM,QAAQ;AACvB;AAKA,SAAS,UAAU,UAAkB,SAA0B;AAC7D,MAAI;AACF,WAAOC,WAAU,UAAU,OAAO;AAAA,EACpC,SAAS,OAAO;AACd,kBAAO,KAAK,yBAAyB,OAAO,IAAI,KAAK,GAC9C;AAAA,EACT;AACF;AAaA,SAAS,gBACP,MACA,MACA,YACa;AAEb,MAAI,cAAc,KAAK,aACnB,OAAO;AAEX,MAAI,CAAC,eAAe,KAAK,SAAS;AAChC,QAAM,SAAS,iBAAiB,KAAK,OAAO;AAC5C,kBAAc,OAAO,aACrB,OAAO,OAAO;AAAA,EAChB;AAGA,MAAI,yBAAyB;AAE7B,MAAI,KAAK,OAAO,KAAK,IAAI,SAAS,KAAK,aAAa;AAElD,QAAM,aAAa,iBAAiB;AAAA,MAClC,UAAUC,UAAS,KAAK,IAAI;AAAA,MAC5B,SAASC,SAAQ,KAAK,IAAI;AAAA,MAC1B,MAAM,KAAK;AAAA,MACX,KAAKC,SAAQ,KAAK,IAAI;AAAA,IACxB,CAAC;AAGD,6BAAyB;AAAA,MACvB;AAAA,MACA,KAAK;AAAA,MACL;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAGA,MAAI,kBAAkB,cAAc,KAAK,MAAM,IAAI,GAE7C,WAAW,gCAAgC,eAAe;AAChE,EAAI,aAAa,oBACf,kBAAkB;AAIpB,MAAI,qBAAqB,KAAK;AAC9B,MAAI,0BAA0B,SAAS,QAAW;AAChD,QAAM,aAAa,SAAS,sBAAsB;AAElD,yBAAqB;AAAA,EADH,WAAW,SAAS;AAAA,CAAI,IAAI,aAAa,GAAG,UAAU;AAAA,CAClC;AAAA,EAAQ,IAAI;AAAA,EACpD;AAEA,SAAO;AAAA,IACL,MAAM;AAAA,IACN,SAAS;AAAA,IACT,aAAa;AAAA,EACf;AACF;AAYA,SAAS,cAAc,UAAkB,MAAoB;AAC3D,MAAM,cAAc,m
BAAmB,MAAM,MAAM,GAC7C,YAAY,mBAAmB,MAAM,IAAI;AAM/C,MAJI,CAAC,eAAe,CAAC,aAIjB,CAAC,UAAU,UAAU,WAAW;AAClC,WAAO;AAGT,MAAI,UAAU,SAAS,IAAI;AACzB,WAAO,uCAAuC,UAAU,aAAa,SAAS;AAGhF,MAAI,UAAU,SAAS,GAAG,GAAG;AAC3B,QAAM,YAAYA,SAAQ,QAAQ,GAC5B,aAAaF,UAAS,UAAU,SAAS,GACzC,UAAU,UAAU,MAAM,GAAG,GAC7B,WAAW,QAAQ,CAAC,GACpB,WAAW,QAAQ,CAAC,KAAK,IACzB,YAAY,SAAS,WAAW,GAAG,IAAI,WAAY,YAAY;AACrE,WAAO,WAAW,aAAa;AAAA,EACjC;AAEA,SAAO;AACT;;;Ac9aO,SAAS,kBACd,QACe;AAEf,MAAM,WAA0B,CAAC;AAEjC,WAAW,CAAC,YAAY,KAAK,KAAK;AAChC,aAAS,KAAK,GAAG,KAAK;AAMxB,SAFqB,iBAAiB,QAAQ;AAGhD;AAYO,SAAS,iBAAiB,OAAqC;AACpE,MAAM,UAAU,oBAAI,IAAyB;AAE7C,WAAW,QAAQ,OAAO;AACxB,QAAM,WAAW,QAAQ,IAAI,KAAK,IAAI;AAEtC,QAAI,CAAC,UAAU;AAEb,cAAQ,IAAI,KAAK,MAAM,IAAI;AAC3B;AAAA,IACF;AAKA,IAFiB,kBAAkB,MAAM,QAAQ,MAEhC,SACf,QAAQ,IAAI,KAAK,MAAM,IAAI;AAAA,EAE/B;AAEA,SAAO,MAAM,KAAK,QAAQ,OAAO,CAAC;AACpC;AASA,SAAS,kBACP,SACA,cACoB;AAIpB,MAAM,iBAAiB,wBAAwB,OAAO,GAChD,sBAAsB,wBAAwB,YAAY;AAEhE,SAAI,kBAAkB,CAAC,sBACd,QAIA;AAKX;AAcA,SAAS,wBAAwB,MAA4B;AAC3D,MAAI,CAAC,KAAK;AAER,WAAO;AAGT,MAAM,KAAK,KAAK;AAGhB,MAAI,WAAW;AAEb,WAAI,QAAM,QAAQ,GAAG,KAAK;AAQ5B,MAAI,iBAAiB,MAEf,OAAO,GAAG,eAAgB,YAAY,GAAG,gBAAgB;AAC3D,WAAO;AAKX,MAAM,0BAA0B;AAAA,IAC9B;AAAA;AAAA,IACA;AAAA;AAAA,IACA;AAAA;AAAA,IACA;AAAA;AAAA,IACA;AAAA;AAAA,IACA;AAAA;AAAA,EACF;AAEA,WAAW,SAAS;AAClB,QAAI,SAAS;AAEX,aAAO;AAKX,SAAO;AACT;;;AC/EO,SAAS,wBACd,cACmB;AAEnB,MAAM,aAAa,MAAM,KAAK,aAAa,OAAO,CAAC,EAChD,OAAO,CAAC,KAAK,UAAU,MAAM,MAAM,QAAQ,CAAC;AAE/C,SAAO;AAAA,IACL;AAAA,IACA,iBAAiB,oBAAI,IAAI;AAAA,IACzB,QAAQ,oBAAI,IAAI;AAAA,IAChB,UAAU;AAAA,MACR;AAAA,MACA,gBAAgB;AAAA,MAChB,cAAc;AAAA,MACd,aAAa;AAAA,MACb,WAAW,KAAK,IAAI;AAAA,IACtB;AAAA,IACA,kBAAkB,oBAAI,IAAI;AAAA,EAC5B;AACF;AAWO,SAAS,sBACd,SACA,YACA,gBACA,gBACA,cACM;AACN,UAAQ,gBAAgB,IAAI,YAAY,cAAc,GACtD,QAAQ,SAAS,kBAAkB,gBACnC,QAAQ,SAAS,gBAAgB;AACnC;AASO,SAAS,sBACd,SACA,UACA,OACM;AACN,UAAQ,OAAO,IAAI,UAAU,KAAK,GAClC,QAAQ,SAAS,eAEjB,OAAO,MAAM,iCAAiC,QAAQ,IAAI,KAAK;AACjE;AASO,SAAS,mBAAmB,SAAkC;AACnE,UAAQ,SAAS,UAAU,KAAK,IAAI,GACpC,QAAQ,SAAS,aAAa,QAAQ,SAAS,UAAU,QAA
Q,SAAS;AAC5E;;;AC3DA,eAAsB,qBACpB,OACA,aACA,UAA6B,CAAC,GACI;AAClC,MAAM,SAAkB,CAAC,GACnB,WAAqB,CAAC;AAE5B,MAAI;AAEF,QAAI,QAAQ;AACV,aAAO;AAAA,QACL,cAAc;AAAA,QACd,iBAAiB,0BAA0B,KAAK;AAAA,QAChD,OAAO,0BAA0B,KAAK;AAAA,QACtC,QAAQ,CAAC;AAAA,QACT,UAAU,CAAC;AAAA,MACb;AAKF,QAAM,kBAAkB,MAAM,4BAA4B,KAAY;AAYtE,QAVA,OAAO,KAAK,6BAA6B;AAAA,MACvC,eAAe,gBAAgB;AAAA,MAC/B,iBAAiB,gBAAgB;AAAA,MACjC,YAAY,gBAAgB;AAAA,MAC5B,YAAY,gBAAgB,SAAS;AAAA,IACvC,CAAC,GAKG,CAFoB,iBAAiB,iBAAiB,OAAO,KAS3D,GAJ+B,gBAAgB,cAAc,QAAQ,KAAK,KAC5E,MAAM,KAAK,gBAAgB,cAAc,KAAK,KAAK,CAAC,CAAC,EAAE;AAAA,MACrD,OAAK,MAAM,eAAe,MAAM;AAAA,IAClC;AAEA,aAAO;AAAA,QACL,cAAc;AAAA,QACd;AAAA,QACA,OAAO,0BAA0B,KAAK;AAAA,QACtC,QAAQ,CAAC;AAAA,QACT,UAAU,CAAC;AAAA,MACb;AAKJ,WAAO,KAAK,8CAA8C;AAAA,MACxD,eAAe,gBAAgB;AAAA,MAC/B,YAAY,gBAAgB,cAAc,QAAQ;AAAA,IACpD,CAAC;AAED,QAAM,mBAAmB,MAAM;AAAA,MAC7B;AAAA,MACA;AAAA,MACA,QAAQ;AAAA,IACV;AAEA,WAAO;AAAA,MACL,cAAc;AAAA,MACd;AAAA,MACA,OAAO,0BAA0B,iBAAiB,KAAK;AAAA,MACvD,mBAAmB,iBAAiB;AAAA,MACpC,QAAQ,iBAAiB;AAAA,MACzB,UAAU,iBAAiB;AAAA,IAC7B;AAAA,EAEF,SAAS,OAAO;AACd,kBAAO,MAAM,kCAAkC,EAAE,MAAM,CAAC,GACxD,OAAO,KAAK,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC,CAAC,GAG9D;AAAA,MACL,cAAc;AAAA,MACd,iBAAiB,2BAA2B,OAAO,KAAK;AAAA,MACxD,OAAO,0BAA0B,KAAK;AAAA,MACtC;AAAA,MACA;AAAA,IACF;AAAA,EACF;AACF;AAgBO,SAAS,iBACd,QACA,UAA6B,CAAC,GACrB;AAET,SAAI,QAAQ,kBACH,KAIL,OAAO,kBAAkB,cACpB,KAIL,OAAO,kBAAkB;AAM/B;AAOA,eAAe,kBACb,OACA,iBACA,WAMC;AACD,MAAM,SAAkB,CAAC,GACnB,WAAqB,CAAC,GAGtB,mBAAmB,gBAAgB;AAEzC,MAAI,CAAC,oBAAoB,iBAAiB,SAAS;AACjD,kBAAO,KAAK,uCAAuC,GAC5C;AAAA,MACL;AAAA,MACA,SAAS,wBAAwB,oBAAI,IAAI,CAAC;AAAA,MAC1C,QAAQ,CAAC;AAAA,MACT,UAAU,CAAC,uCAAuC;AAAA,IACpD;AAIF,MAAM,UAAU,oBAAI,IAAkC;AACtD,WAAW,QAAQ;AACjB,YAAQ,IAAI,KAAK,MAAM,IAAI;AAI7B,MAAM,eAAe,oBAAI,IAAwD;AACjF,WAAW,CAAC,YAAY,KAAK,KAAK,iBAAiB,QAAQ,GAAG;AAC5D,QAAM,aAAqC,CAAC;AAC5C,aAAWG,SAAQ,OAAO;AACxB,UAAM,OAAO,QAAQ,IAAIA,KAAI;AAC7B,MAAI,OACF,WAAW,KAAK,IAAI,IAEpB,OAAO,KAAK,+BAA+BA,KAAI,EAAE;AAAA,IAErD;AACA,IAAI,WAAW,SAAS,KACtB,aAAa,IAAI,YAAY,UAAU;AAAA,EAE3C;AAG
A,MAAM,oBAAoB,wBAAwB,YAAY,GAGxD,kBAAkB,oBAAI,IAA+C;AAE3E,WAAW,CAAC,YAAY,UAAU,KAAK,aAAa,QAAQ;AAC1D,QAAI;AASF,UAAM,cAAc,mBAPa;AAAA,QAC/B;AAAA,QACA,OAAO;AAAA,QACP,YAAY;AAAA;AAAA,MACd,GAGoD,SAAS;AAI7D;AAAA,QACE;AAAA,QACA;AAAA,QACA,YAAY;AAAA,QACZ,YAAY;AAAA,QACZ,YAAY,iBAAiB,YAAY;AAAA,MAC3C;AAGA,eAAW,cAAc,YAAY;AACnC,QAAI,CAAC,WAAW,WAAW,WAAW,UACpC;AAAA,UACE;AAAA,UACA,WAAW,SAAS;AAAA,UACpB,WAAW;AAAA,QACb,GACA,OAAO,KAAK,WAAW,KAAK;AAMhC,sBAAgB,IAAI,YAAY,YAAY,cAAqB;AAAA,IAEnE,SAAS,OAAO;AACd,aAAO,MAAM,mCAAmC,UAAU,IAAI,EAAE,MAAM,CAAC;AACvE,UAAM,MAAM,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC;AACpE,aAAO,KAAK,GAAG,GACf,SAAS,KAAK,qBAAqB,UAAU,kBAAkB,IAAI,OAAO,EAAE;AAG5E,eAAW,QAAQ;AACjB,8BAAsB,mBAAmB,KAAK,MAAM,GAAG;AAAA,IAE3D;AAIF,MAAM,cAAc,kBAAkB,eAAe;AAGrD,4BAAmB,iBAAiB,GAEpC,OAAO,KAAK,2BAA2B;AAAA,IACrC,YAAY,YAAY;AAAA,IACxB,gBAAgB,kBAAkB,SAAS;AAAA,IAC3C,aAAa,kBAAkB,SAAS;AAAA,IACxC,YAAY,kBAAkB,SAAS;AAAA,EACzC,CAAC,GASM;AAAA,IACL,OAPoC,YAAY,IAAI,QAAM;AAAA,MAC1D,MAAM,EAAE;AAAA,MACR,SAAS,EAAE,WAAW;AAAA;AAAA,MACtB,GAAI,EAAE,eAAe,EAAE,aAAa,EAAE,YAAY;AAAA,IACpD,EAAE;AAAA,IAIA,SAAS;AAAA,IACT;AAAA,IACA;AAAA,EACF;AACF;AAMA,SAAS,0BAA0B,OAA8C;AAC/E,SAAO,MAAM,IAAI,QAAM;AAAA,IACrB,MAAM,EAAE;AAAA,IACR,SAAS,EAAE,WAAW;AAAA,EACxB,EAAE;AACJ;AAKA,SAAS,0BAA0B,OAAsD;AACvF,SAAO;AAAA,IACL,eAAe;AAAA,IACf,iBAAiB;AAAA,IACjB,YAAY;AAAA,IACZ,UAAU;AAAA,MACR,YAAY,MAAM;AAAA,MAClB,eAAe;AAAA,MACf,cAAc,MAAM;AAAA,MACpB,oBAAoB,oBAAI,IAAI,CAAC,CAAC,aAAa,MAAM,MAAM,CAAC,CAAC;AAAA,IAC3D;AAAA,EACF;AACF;AAKA,SAAS,2BACP,OACA,OACuB;AACvB,gBAAO,MAAM,8CAA8C,EAAE,MAAM,CAAC,GAE7D;AAAA,IACL,eAAe;AAAA,IACf,iBAAiB;AAAA,IACjB,YAAY;AAAA,IACZ,UAAU;AAAA,MACR,YAAY,MAAM;AAAA,MAClB,eAAe;AAAA,MACf,cAAc,MAAM;AAAA,MACpB,oBAAoB,oBAAI,IAAI,CAAC,CAAC,WAAW,MAAM,MAAM,CAAC,CAAC;AAAA,IACzD;AAAA,EACF;AACF;;;AClZA,SAAS,WAAAC,gBAAe;AACxB,SAAS,QAAAC,cAAY;AACrB,SAAS,YAAAC,iBAAgB;;;ACfzB,SAAS,aAAAC,kBAAiB;AAUnB,SAAS,iBAAiBC,OAAc,SAA0B;AACvE,SAAOD,WAAUC,OAAM,SAAS,EAAE,KAAK,GAAK,CAAC;AAC/C;;;ACNA,SAAS,QAAAC,QAAM,WAAAC,gBAAe;AAC9B,SAAS,SAAS,MAAAC,KAAI,WAAAC,gBAA
e;AACrC,SAAS,UAAAC,eAAc;AACvB,SAAS,cAAAC,mBAAkB;AAwB3B,SAAS,iBAAiB,SAAyB;AACjD,SAAOC,YAAW,QAAQ,EAAE,OAAO,OAAO,EAAE,OAAO,KAAK,EAAE,MAAM,GAAG,CAAC;AACtE;AAQA,eAAsB,2BAA2B,SAAiB,mBAAoC;AAEpG,SADgB,MAAM,QAAQC,OAAKC,QAAO,GAAG,MAAM,CAAC;AAEtD;AAsBA,eAAsB,+BACpB,cACA,QAA8B,EAAE,MAAM,OAAO,GAC5B;AACjB,MAAM,WAAW,MAAM,SAAS,SAC5B,UACA,WAAW,iBAAiB,MAAM,OAAO,CAAC,IACxC,gBAAgBD,OAAK,cAAc,mBAAmB,QAAQ;AACpE,eAAM,UAAU,aAAa,GACtB;AACT;AAYA,eAAsB,sBACpB,cACA,WACe;AACf,MAAM,gBAAgBA,OAAK,cAAc,iBAAiB,GACtD;AACJ,MAAI;AACF,cAAU,MAAME,SAAQ,aAAa;AAAA,EACvC,QAAQ;AAEN;AAAA,EACF;AAEA,MAAM,UAAU,UAAU,SAAS,SAC/B,UACA,WAAW,iBAAiB,UAAU,OAAO,CAAC;AAElD,WAAW,SAAS;AAClB,QAAI,UAAU,YAEV,MAAM,WAAW,OAAO,KAAK,MAAM,WAAW,UAAU,IAAG;AAC7D,UAAM,UAAUF,OAAK,eAAe,KAAK;AACzC,UAAI;AACF,cAAMG,IAAG,SAAS,EAAE,WAAW,IAAM,OAAO,GAAK,CAAC,GAClD,OAAO,MAAM,yCAAyC,KAAK,EAAE;AAAA,MAC/D,SAAS,OAAO;AACd,eAAO,KAAK,gDAAgD,EAAE,OAAO,MAAM,CAAC;AAAA,MAC9E;AAAA,IACF;AAEJ;AAQA,eAAsB,sBACpB,OACA,SACe;AACf,WAAW,QAAQ,OAAO;AACxB,QAAM,WAAWH,OAAK,SAAS,KAAK,IAAI;AACxC,UAAM,UAAUI,SAAQ,QAAQ,CAAC,GACjC,MAAM,cAAc,UAAU,KAAK,OAAO;AAAA,EAC5C;AACF;AAQA,eAAsB,uBACpB,SACA,SACe;AACf,MAAM,cAAcJ,OAAK,SAAS,+BAA+B,GAC3D,cAAc,cAAc,OAAO;AACzC,QAAM,cAAc,aAAa,WAAW;AAC9C;AAOA,eAAsB,qBAAqB,SAAuC;AAChF,MAAK;AAIL,QAAI;AACF,YAAMG,IAAG,SAAS,EAAE,WAAW,IAAM,OAAO,GAAK,CAAC;AAAA,IACpD,SAAS,OAAO;AACd,aAAO,KAAK,oCAAoC;AAAA,QAC9C;AAAA,QACA;AAAA,MACF,CAAC;AAAA,IACH;AACF;;;AFnIA,eAAsB,aAAa,KAAyC;AAE1E,MAAI,CAAC,IAAI,OAAO,aAAa;AAC3B,WAAO,KAAK,yCAAyC;AACrD;AAAA,EACF;AAGA,MAAI,IAAI,OAAO,gBAAgB,eAAe,iBAOzC,IAAkC,iBAAiB;AAIxD,QAAI;AAEF,UAAM,QAAQ,MAAM,iBAAiB,IAAI,OAAO,aAAa;AAAA,QAC3D,WAAW,IAAI;AAAA,QACf,gBAAgB,IAAI;AAAA,MACtB,CAAC;AAED,UAAI,MAAM,WAAW,GAAG;AACtB,eAAO,KAAK,gDAAgD;AAC5D;AAAA,MACF;AAGA,UAAM,mBAAmB,MAAM;AAAA,QAC7B;AAAA,QACA,IAAI,OAAO;AAAA,QACX;AAAA,UACE,WAAW,IAAI;AAAA,UACf,gBAAgB;AAAA,QAClB;AAAA,MACF;AAGA,UAAI,kBAAkB,iBAAiB,iBACvC,IAAI,kBAAkB,iBAAiB,cAEnC,iBAAiB,OAAO,SAAS,MACnC,IAAI,mBAAmB,iBAAiB,QACxC,OAAO,KAAK,yBAAyB;AAAA,QACnC,YAAY,iBAAiB,OAAO;AAAA,MACtC,CAAC;AAIH,eAAW,W
AAW,iBAAiB;AACrC,mBAAW,KAAK,OAAO;AAIzB,UAAI,iBAAiB,cAAc;AACjC,eAAO,KAAK,6CAA6C;AAAA,UACvD,aAAa,IAAI,OAAO;AAAA,UACxB,gBAAgB,iBAAiB,gBAAgB;AAAA,UACjD,WAAW,iBAAiB,MAAM;AAAA,UAClC,iBAAiB,iBAAiB,gBAAgB;AAAA,QACpD,CAAC;AAGD,YAAM,sBAAsB,IAAI,OAAO,aAGjC,aAAa,oBAAoB,SAAS,0BAA0B,KACvD,oBAAoB,SAAS,yBAAyB,GAErE,gBACA,gBAAgB;AAEpB,YAAI,YAAY;AAGd,cAAM,gBAAgB,oBAAoB,MAAM,+CAA+C,GACzF,eAAe,gBAAgB,cAAc,CAAC,IAAI,qBAIlD,aAAmC,IAAI,iBAAiB,YAAY,IAAI,iBAC1E,EAAE,MAAM,UAAU,SAAS,IAAI,eAAe,IAC9C,EAAE,MAAM,OAAO,GAKb,iBAAiB,MAAM,+BAA+B,cAAc,UAAU;AACpF,gBAAM,qBAAqB,cAAc,GAIrC,WAAW,SAAS,UACtB,MAAM,sBAAsB,cAAc,UAAU,GAItD,iBAAiB,MAAM,+BAA+B,cAAc,UAAU,GAC9E,MAAM,sBAAsB,iBAAiB,OAAO,cAAc,GAClE,gBAAgB,IAChB,OAAO,KAAK,WAAW,SAAS,WAC5B,kEACA,mDAAmD;AAAA,YACrD;AAAA,YACA,OAAO,WAAW;AAAA,YAClB,WAAW,iBAAiB,MAAM;AAAA,UACpC,CAAC;AAAA,QACH;AAEE,2BAAiB,MAAM,2BAA2B,oBAAoB,GACtE,MAAM,sBAAsB,iBAAiB,OAAO,cAAc,GAClE,gBAAgB,IAChB,OAAO,KAAK,0CAA0C,EAAE,eAAe,CAAC;AAI1E,QAAI,kBACF,IAAI,sBAAsB,iBAI5B,IAAI,uBAAuB,qBAG3B,IAAI,OAAO,cAAc;AACzB,YAAM,UAAU,IAAI,iBAAiB,KAAK,CAAC,MAAW,EAAE,MAAM;AAQ9D,YAPI,YACF,QAAQ,cAAc,gBAErB,QAAgB,sBAAsB,sBAIrC,IAAI,iBAAiB,SAAS,GAAG;AACnC,cAAM,cAAc,IAAI,iBAAiB,CAAC;AAC1C,UAAI,YAAY,QAEd,YAAY,IAAI,QAAQ,iBAAiB,OAGpC,YAAY,IAAI,aACnB,YAAY,IAAI,WAAW,CAAC,IAE7B,YAAY,IAAI,SAAiB,gBAAgB,IACjD,YAAY,IAAI,SAAiB,kBAAkB,iBAAiB,gBAAgB;AAAA,QAEzF;AAAA,MACF;AAAA,IAEF,SAAS,OAAO;AAEd,aAAO,MAAM,wBAAwB,EAAE,MAAM,CAAC,GAC9C;AAAA,QACE;AAAA,QACA,6BAA6B,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC;AAAA,MACrF,GAGK,IAAI,qBACP,IAAI,mBAAmB,CAAC,IAE1B,IAAI,iBAAiB;AAAA,QACnB,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC;AAAA,MAC1D;AAAA,IACF;AACF;AAUA,eAAe,iBACb,aACA,MACiC;AACjC,MAAM,QAAgC,CAAC;AAEvC,WAAS,eAAe,SAAiB,WAAmB,gBAAkC;AAC5F,QAAM,aAAa,QAAQ,QAAQ,OAAO,GAAG,EAAE,QAAQ,UAAU,EAAE;AAEnE,QAAI,EAD0B,iBAAiB,iBAAiB,YAAY,cAAc,IAAI;AAE5F,aAAO;AAGT,QAAM,cAAc,wBAAwB,YAAY,SAAS,GAC7D,cAA8D;AAElE,QAAI,CAAC,aAAa;AAEhB,UAAM,YAAY,uBAAuB,SAAS;AAClD,eAAW,CAAC,YAAY,GAAG,KAAK,OAAO,QAAQ,SAAS,GAAG;AACzD,YAAM,cAAc,IAAI,UAAU,CAAC;AACnC,iBAAW,QAAQ,aAAa;AAC9B
,cAAM,UAAU,mBAAmB,MAAa,MAAM;AACtD,cAAI,WAAW,iBAAiB,YAAY,OAAO,GAAG;AACpD,0BAAc,EAAE,YAAY,QAAQ;AACpC;AAAA,UACF;AAAA,QACF;AACA,YAAI,YAAa;AAAA,MACnB;AAAA,IACF;AAIA,WAFiB,eAAe,gBAAgB;AAAA,EAGlD;AAKA,MAAM,kBAAkB,oBAAI,IAAY,GAClC,eAAe,uBAAuB,KAAK,SAAS;AAC1D,WAAW,OAAO,OAAO,OAAO,YAAY;AAC1C,aAAW,UAAW,IAAY,aAAa,CAAC;AAC9C,MAAI,OAAO,UAAW,YAAY,OAAO,WAAW,GAAG,KACrD,gBAAgB,IAAI,OAAO,MAAM,GAAG,EAAE,CAAC,CAAC;AAK9C,iBAAe,KAAK,KAAa,SAAiBE,OAAqE;AACrH,QAAM,UAAU,MAAMC,SAAQ,KAAK,EAAE,eAAe,GAAK,CAAC;AAE1D,aAAW,SAAS,SAAS;AAC3B,UAAM,WAAWC,OAAK,KAAK,MAAM,IAAI;AAGrC,UAAI,MAAM,YAAY,GAAG;AAIvB,YAHI,MAAM,KAAK,WAAW,GAAG,KAAK,CAAC,gBAAgB,IAAI,MAAM,IAAI,KAG7D,MAAM,SAAS;AACjB;AAGF,cAAM,KAAK,UAAU,SAASF,KAAI;AAAA,MACpC,OAAO;AACL,YAAM,eAAe,wBAAwB,UAAU,OAAO;AAC9D,YAAI,CAAC,eAAe,cAAcA,MAAK,WAAWA,MAAK,cAAc;AACnE;AAGF,YAAI;AACF,cAAM,UAAU,MAAMG,UAAS,UAAU,OAAO;AAEhD,gBAAM,KAAK;AAAA,YACT,MAAM;AAAA,YACN;AAAA,UACF,CAAC;AAAA,QACH,SAAS,OAAO;AACd,iBAAO,KAAK,wBAAwB,QAAQ,IAAI,EAAE,MAAM,CAAC;AAAA,QAC3D;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,eAAM,KAAK,aAAa,aAAa,IAAI,GAElC;AACT;;;AGrTA,SAAS,QAAAC,cAAY;;;ACuCd,SAAS,wBAAwB,SAGlB;AACpB,MAAM,QAAQ,QAAQ,MAAM,UAAU,IAElC;AACJ,MAAI,QAAQ,aAAa;AACvB,QAAI,CAAC;AACH,YAAM,IAAI;AAAA,QACR;AAAA,MAEF;AAEF,WAAO;AAAA,EACT,MAAO,CAAI,CAAC,SAAS,QAAQ,IAAI,OAAO,SACtC,OAAO,UAEP,OAAO;AAGT,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA,UAAU,MAA2B;AACnC,aAAI,SAAS,UAAgB,KACzB,SAAS,WAAiB,KAEvB,OAAO;AAAA,IAChB;AAAA,EACF;AACF;;;ACtEA,SAAS,QAAAC,cAAY;AA8BrB,eAAe,sBAAsB,eAA+C;AAClF,MAAM,eAAeC,OAAK,eAAe,cAAc,eAAe,GAChE,oBAAoB,uBAAuB,aAAa;AAE9D,SAAI,MAAM,OAAO,YAAY,IACpB,eACE,MAAM,OAAO,iBAAiB,IAChC,oBAGF;AACT;AAKA,eAAsB,2BAA2B,aAAqB,WAA2C;AAC/G,MAAM,kBAAkBA,OAAK,WAAW,IAAoB,GACtD,yBAAyBA,OAAK,iBAAiB,WAAW;AAEhE,MAAI,CAAE,MAAM,OAAO,sBAAsB;AACvC,WAAO;AAGT,MAAM,aAAa,MAAM,sBAAsB,sBAAsB;AACrE,MAAI,CAAC;AACH,WAAO;AAGT,MAAI;AAEF,YADe,MAAM,gBAAgB,UAAU,GACjC,WAAW;AAAA,EAC3B,SAAS,OAAO;AACd,kBAAO,KAAK,sCAAsC,WAAW,KAAK,KAAK,EAAE,GAClE;AAAA,EACT;AACF;AA6NA,eAAsB,oCACpB,KACA,aACkE;AAElE,MAAM,UAAkE,CAAC;AAGzE,UAAQ,KAAK;AAAA,IACX,KAAKC,OA
AK,KAAK,IAAoB;AAAA,IACnC,MAAM,CAAC,cAAc,QAAQ;AAAA,IAC7B,OAAO;AAAA,EACT,CAAC;AAGD,MAAI;AACF,QAAM,YAAY,MAAM,qBAAqB,GAAG;AAChD,aAAW,YAAY,WAAW;AAChC,UAAM,MAAM,sBAAsB,QAAoB;AAGtD,UAAI,IAAI,UAAU,IAAI,OAAO,SAAS,GAAG;AACvC,YAAM,eAAe,oBAAI,IAAY;AAErC,iBAAW,QAAQ,IAAI,QAAQ;AAC7B,cAAM,YAAY,uBAAuB,KAAK,IAAI,qBAAqB;AACvE,cAAI,WAAW;AAEb,gBAAM,QAAQ,UAAU,MAAM,GAAG;AACjC,gBAAI,MAAM,SAAS,GAAG;AACpB,kBAAM,WAAW,MAAM,MAAM,GAAG,EAAE,EAAE,KAAK,GAAG;AAC5C,2BAAa,IAAI,QAAQ;AAAA,YAC3B;AAAA,UACF;AAAA,QACF;AAGA,iBAAW,WAAW,cAAc;AAClC,cAAM,cAAcA,OAAK,KAAK,OAAO;AACrC,kBAAQ,KAAK;AAAA,YACX,KAAK;AAAA,YACL,MAAM;AAAA;AAAA,YACN,OAAO,IAAI;AAAA,UACb,CAAC;AAAA,QACH;AAAA,MACF;AAAA,IACF;AAAA,EACF,SAAS,OAAO;AACd,WAAO,MAAM,4CAA4C,KAAK,EAAE;AAAA,EAClE;AAEA,SAAO,MAAM,kCAAkC,WAAW,YAAY,QAAQ,MAAM,YAAY;AAGhG,WAAW,UAAU,SAAS;AAC5B,QAAM,aAAa,OAAO;AAC1B,QAAI,gBAAc,WAAW,WAAW;AAIxC,UAAI;AACF,YAAM,QAAQ,MAAM,qBAAqB,OAAO,KAAK,cAAc,CAAC,CAAC;AACrE,iBAAW,QAAQ;AAAO;AAAA,MAG5B,SAAS,QAAQ;AACf,eAAO,MAAM,8BAA8B,OAAO,GAAG,KAAK,MAAM,EAAE;AAAA,MACpE;AAAA,EACF;AAEA,SAAO,EAAE,OAAO,GAAM;AACxB;;;ACtWA,YAAYC,aAAY;AA2CxB,eAAsB,iCACpB,aACA,kBAC+D;AAC/D,MAAI,iBAAiB,SACjB,wBAGE,mBAAoB,iBAAiB,CAAC,GAAW;AAEvD,WAAW,YAAY;AACrB,IAAI,0BAA0B,SAAS,MAAM,WAAW,KAC3C,WAAG,SAAS,SAAS,cAAc,MAC5C,iBAAiB,SAAS;AAMhC,MAAI,oBAAoB,iBAAiB,IAAI,WAAW,GAAG;AACzD,QAAM,WAAW,iBAAiB,IAAI,WAAW;AACjD,aAAW,WAAW;AACpB,OAAI,CAAC,0BAAiC,WAAG,SAAS,sBAAsB,OACtE,yBAAyB;AAAA,EAG/B;AAEA,SAAO,EAAE,gBAAgB,iBAAiB,uBAAuB;AACnE;;;AHrDA,eAAeC,4BAA2B,KAAa,aAAkD;AACvG,MAAI;AACF,QAAM,aAAa,mBAAmB,KAAK,WAAW,GAChD,iBAAiBC,OAAK,YAAY,cAAc,eAAe;AACrE,QAAI,MAAM,OAAO,cAAc;AAE7B,cADe,MAAM,gBAAgB,cAAc,GACrC;AAAA,EAElB,QAAQ;AAAA,EAER;AAEF;AAKA,eAAsB,kCACpB,kBACA,SACA,QACA,QACA,QACqG;AACrG,MAAM,MAAM,QAAQ,IAAI,GAClB,kBAA4B,CAAC,GAC7B,yBAAyB,oBAAI,IAAY,GACzC,IAAI,UAAU,cAAc,GAC5B,MAAM,UAAU,cAAc;AAGpC,WAAW,YAAY,kBAAkB;AACvC,QAAM,gBAAgB,MAAM,oCAAoC,KAAK,SAAS,IAAI;AAElF,QAAI,cAAc,OAAO;AACvB,UAAM,cAAc,MAAM,iCAAiC,SAAS,MAAM,gBAAgB,GACpF,kBAAkB,cAAc,WAAW,MAAMD,4BAA2B,KAAK,SAAS,IAAI;AAIpG,UAAI,QAAQ;AAEV;AAGF,UAAI,
QAAQ,OAAO;AAEjB,eAAO,KAAK,uDAAuD,SAAS,IAAI,MAAM,eAAe,EAAE,GACvG,uBAAuB,IAAI,SAAS,IAAI;AACxC;AAAA,MACF;AAGA,UAAI,UAAU,CAAC,OAAO,8BAAiC;AACrD,YAAI,KAAK,aAAa,SAAS,IAAI,+CAA+C,GAClF,gBAAgB,KAAK,SAAS,IAAI;AAAA,WAC7B;AACL,YAAM,gBAAgB,kBAAkB,KAAK,eAAe,MAAM;AAIlE,QAHkB,MAAM,EAAE;AAAA,UACxB,YAAY,SAAS,IAAI,mBAAmB,aAAa;AAAA,QAC3D,IAEE,uBAAuB,IAAI,SAAS,IAAI,IAExC,gBAAgB,KAAK,SAAS,IAAI;AAAA,MAEtC;AACA;AAAA,IACF;AAAA,EACF;AAEA,SAAO,EAAE,eAAe,IAAM,iBAAiB,uBAAuB;AACxE;;;AInFA,eAAsB,sBAAsB,KAA4C;AACtF,MAAM,iBAAiB,MAAM;AAAA,IAC3B,IAAI;AAAA,IACJ,IAAI;AAAA,IACJ,IAAI,WAAW;AAAA,IACf,IAAI,WAAW;AAAA,IACf,IAAI,WAAW;AAAA,EACjB;AAEA,SAAK,eAAe,iBAKpB,IAAI,mBAAmB,IAAI,iBAAiB,OAAO,SAAO,CAAC,eAAe,gBAAgB,SAAS,IAAI,IAAI,CAAC,GAG5G,IAAI,iBAAiB,gBAEd,MATE;AAUX;;;ACbO,SAAS,yBAAyB,WAAuB,WAAiC;AAC/F,MAAM,QAAQ,oBAAI,IAAY,CAAC,cAAc,SAAS,CAAC;AACvD,WAAW,YAAY,WAAW;AAChC,QAAM,MAAM,sBAAsB,UAAU,SAAS;AACrD,IAAI,IAAI,YACN,MAAM,IAAI,IAAI,QAAQ;AAAA,EAE1B;AACA,SAAO;AACT;;;AClBA,SAAS,QAAAE,cAAY;AAUrB,eAAsB,2BACpB,aACA,SACA,WACA,aACA,gBACkC;AAIlC,MAAI,aAAa;AACf,QAAM,oBAAoB,MAAM,OAAOC,OAAK,aAAa,iBAAiB,CAAC,GACrE,sBAAsB,MAAM,OAAOA,OAAK,aAAa,kBAAkB,aAAa,CAAC,GACrF,2BAA2B,MAAM,OAAOA,OAAK,aAAa,kBAAkB,kBAAkB,CAAC,GAC/F,uBAAuB,MAAM,iBAAiB,WAAW;AAI/D,QAAI,EAFF,qBAAqB,uBAAuB,4BAA4B;AAGxE,aAAO,EAAE,gBAAgB,CAAC,GAAG,WAAW,oBAAI,IAAI,EAAE;AAAA,EAEtD;AAGA,MAAM,MAAM,MAAM,eAAe,YAAY,aAAa,SAAS;AAAA,IACjE,gBAAgB;AAAA,EAClB,CAAC,GAGK,gBAAgB,CAACC,UAEjB,oBAAkB,CAAC,iBAAiBA,OAAM,cAAc,IAQxD,oBAAoB,yBAAyB,SAAS,GAGtD,iBAAgC,CAAC,GACjC,YAAY,oBAAI,IAAoB;AAC1C,WAAW,QAAQ,IAAI,OAAO;AAC5B,QAAM,IAAI,KAAK,MACT,aAAa,qBAAqB,CAAC;AAEzC,IAAI,eAAe,CAAC,KAAK,eAAe,cAAc,kBACjD,cAAc,CAAC,MAEpB,eAAe,KAAK,IAAI,IAEpB,eAAe,cAAc,aAAa,kBAAkB,IAAI,UAAU,MAC5E,UAAU,IAAI,YAAY,KAAK,OAAO;AAAA,EAE1C;AAEA,SAAO,EAAE,gBAAgB,UAAU;AACrC;;;ACtEA,SAAS,QAAAC,cAAY;;;ACOd,SAAS,aAAa,OAAuB;AAClD,SAAO,MAAM,QAAQ,uBAAuB,MAAM;AACpD;AAGO,SAAS,gBAAgB,aAA6B;AAC3D,SAAO,iBAAiB,WAAW;AACrC;AAGO,IAAM,eAAe;AAGrB,SAAS,qBAAqB,aAA6B;AAChE,MAAM,cAAc,aAAa,WAAW;AAC5C,SAAO,IAAI,OAAO,uBAAuB,WAAW,WAAW,GA
AG;AACpE;AAMO,IAAM,qBAAqB;AA8B3B,SAAS,kCAAkC,SAAiB,aAAoC;AACrG,MAAI,CAAC,WAAW,CAAC,YAAa,QAAO;AAErC,MAAM,SAAS,qBAAqB,WAAW,GACzC,UAAU,oBAEV,YAAY,OAAO,KAAK,OAAO;AACrC,MAAI,CAAC,UAAW,QAAO;AAEvB,MAAM,aAAa,UAAU,QAAQ,UAAU,CAAC,EAAE,QAC5C,OAAO,QAAQ,MAAM,UAAU,GAC/B,aAAa,QAAQ,KAAK,IAAI;AACpC,MAAI,CAAC,WAAY,QAAO;AAExB,MAAM,iBAAiB,WAAW;AAElC,SADkB,KAAK,MAAM,GAAG,cAAc,EAC7B,KAAK;AACxB;;;ACjEO,SAAS,gCACd,iBACA,aACA,YACQ;AACR,MAAI,CAAC;AACH,UAAM,IAAI,MAAM,sCAAsC;AAGxD,MAAM,aAAa,gBAAgB,WAAW,GACxC,cAAc,cAGd,SAAS,qBAAqB,WAAW,GACzC,UAAU,oBAEV,YAAY,OAAO,KAAK,eAAe;AAE7C,MAAI,CAAC,WAAW;AAEd,QAAM,YAAY,gBAAgB,KAAK,IAAI;AAAA;AAAA,IAAS;AACpD,WAAO,gBAAgB,KAAK,IAAI,YAAY,aAAa;AAAA,IAAO,WAAW,KAAK,IAAI;AAAA,IAAO,cAAc;AAAA;AAAA,EAC3G;AAGA,MAAM,gBAAgB,gBAAgB,UAAU,GAAG,UAAU,KAAK,GAC5D,mBAAmB,UAAU,QAAQ,UAAU,CAAC,EAAE,QAClD,cAAc,gBAAgB,UAAU,gBAAgB,GAExD,aAAa,QAAQ,KAAK,WAAW;AAE3C,MAAI,CAAC,YAAY;AAEf,QAAM,YAAY,gBAAgB,KAAK,IAAI;AAAA;AAAA,IAAS;AACpD,WAAO,gBAAgB,KAAK,IAAI,YAAY,aAAa;AAAA,IAAO,WAAW,KAAK,IAAI;AAAA,IAAO,cAAc;AAAA;AAAA,EAC3G;AAGA,MAAM,sBAAsB,YAAY,UAAU,GAAG,WAAW,KAAK,EAAE,KAAK,GACtE,wBAAwB,WAAW,QAAQ,WAAW,CAAC,EAAE,QACzD,eAAe,YAAY,UAAU,qBAAqB,GAK1D,mBADqB,UAAU,CAAC,MACU,YAC1C,oBAAoB,wBAAwB,WAAW,KAAK;AAGlE,SAAI,oBAAoB,qBAFD,KAId,kBAIF,gBAAgB,aAAa;AAAA,IAAO,WAAW,KAAK,IAAI;AAAA,IAAO,cAAc;AACtF;;;AFpBA,eAAsB,uBACpB,KACA,aACA,WACA,WACkC;AAClC,MAAM,SAAkC;AAAA,IACtC,SAAS,CAAC;AAAA,IACV,SAAS,CAAC;AAAA,IACV,SAAS,CAAC;AAAA,EACZ,GAGM,eAAe,uBAAuB,SAAS;AAErD,SAAI,aAAa,SAAS,MAK1B,MAAM,2BAA2B,KAAK,aAAa,cAAc,MAAM,GAGnE,UAAU,SAAS,KACrB,MAAM,yBAAyB,KAAK,aAAa,cAAc,WAAW,MAAM,GAIlF,OAAO,UAAU,MAAM,KAAK,IAAI,IAAI,OAAO,OAAO,CAAC,GACnD,OAAO,UAAU,MAAM,KAAK,IAAI,IAAI,OAAO,OAAO,CAAC,GACnD,OAAO,UAAU,MAAM,KAAK,IAAI,IAAI,OAAO,OAAO,CAAC,GAGnD,OAAO,YAAY,OAAO,UAEnB;AACT;AAKA,SAAS,uBAAuB,OAA2C;AAEzE,MAAI,iBAAiB;AACnB,WAAO;AAIT,MAAI,MAAM,QAAQ,KAAK,GAAG;AACxB,QAAM,MAAM,oBAAI,IAAoB;AACpC,aAAW,QAAQ,OAAO;AACxB,UAAM,WAAW,YAAY,KAAK,IAAI;AACtC,MAAI,YAAY,WAAW,QAAQ,KACjC,IAAI,IAAI,UAAU,KAAK,OAAO;AAAA,IAElC;AACA,WAAO;AAAA,EACT;AAGA,SAAO,IAAI
,IAAI,OAAO,QAAQ,KAAK,CAAC;AACtC;AAKA,SAAS,WAAW,UAA2B;AAC7C,MAAM,gBAAgB,oBAAI,IAAY,CAAC,cAAc,SAAS,CAAC;AAG/D,WAAW,YAAY,gBAAgB,GAAG;AACxC,QAAM,MAAM,sBAAsB,QAAQ;AAC1C,IAAI,IAAI,YACN,cAAc,IAAI,IAAI,QAAQ;AAAA,EAElC;AAEA,SAAO,cAAc,IAAI,QAAQ;AACnC;AAKA,eAAe,2BACb,KACA,aACA,cACA,QACe;AACf,MAAM,gBAAgB,aAAa,IAAI,cAAc,SAAS;AAE9D,MAAI,GAAC,iBAAiB,CAAC,cAAc,KAAK;AAI1C,QAAI;AACF,UAAM,aAAaC,OAAK,KAAK,cAAc,SAAS;AAOpD,MANmB,MAAM;AAAA,QACvB;AAAA,QACA;AAAA,QACA,cAAc,KAAK;AAAA,MACrB,IAGE,OAAO,QAAQ,KAAK,cAAc,SAAS,IAE3C,OAAO,QAAQ,KAAK,cAAc,SAAS;AAAA,IAE/C,SAAS,OAAO;AACd,aAAO,MAAM,yCAAyC,cAAc,SAAS,KAAK,KAAK,EAAE,GACzF,OAAO,QAAQ,KAAK,cAAc,SAAS;AAAA,IAC7C;AACF;AAKA,eAAe,yBACb,KACA,aACA,cACA,WACA,QACe;AACf,WAAW,YAAY,WAAW;AAChC,QAAM,cAAc,sBAAsB,QAAQ;AAOlD,QALI,CAAC,YAAY,YAKb,YAAY,aAAa,cAAc;AACzC;AAIF,QAAI,UAAU,aAAa,IAAI,YAAY,QAAQ,GAC/C,iBAAiB,YAAY;AAOjC,QALI,CAAC,WAAW,aAAa,IAAI,cAAc,SAAS,MACtD,UAAU,aAAa,IAAI,cAAc,SAAS,GAClD,iBAAiB,cAAc,YAG7B,GAAC,WAAW,CAAC,QAAQ,KAAK;AAI9B,UAAI;AACF,YAAM,aAAaA,OAAK,KAAK,YAAY,QAAQ;AAOjD,QANmB,MAAM;AAAA,UACvB;AAAA,UACA;AAAA,UACA,QAAQ,KAAK;AAAA,QACf,IAGE,OAAO,QAAQ,KAAK,YAAY,QAAQ,IAExC,OAAO,QAAQ,KAAK,YAAY,QAAQ;AAAA,MAE5C,SAAS,OAAO;AACd,eAAO,MAAM,+BAA+B,YAAY,QAAQ,KAAK,KAAK,EAAE,GAC5E,OAAO,QAAQ,KAAK,YAAY,QAAQ;AAAA,MAC1C;AAAA,EACF;AACF;AAMA,eAAe,sBACb,YACA,aACA,aACkB;AAElB,MAAI,kBAAkB,IAClB,aAAa;AAQjB,MANI,MAAM,OAAO,UAAU,MACzB,kBAAkB,MAAM,aAAa,UAAU,GAC/C,aAAa,KAIX,cAC6B;AAAA,IAC7B;AAAA,IACA;AAAA,EACF,GAAG,KAAK,MAEuB;AAC7B,WAAO;AAKX,MAAM,gBAAgB;AAAA,IACpB;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAGA,eAAM,cAAc,YAAY,aAAa,GAEtC;AACT;;;AGvPA,SAAS,QAAAC,QAAM,YAAAC,kBAAgB;AAC/B,SAAS,YAAYC,WAAU;;;ACP/B,SAAS,YAAYC,WAAU;AAC/B,SAAS,WAAAC,UAAS,QAAAC,QAAM,WAAW,mBAAmB;AACtD,YAAYC,WAAU;;;ACNf,SAASC,sBAAqB,QAA4B;AAC/D,SAAO,MAAM,KAAK,IAAI,IAAI,MAAM,CAAC,EAAE,KAAK;AAC1C;;;ADaA,IAAM,2BAA2B,GAC3B,kBAAkB;AAOxB,SAAS,gBAA0B;AACjC,SAAO,EAAE,iBAAiB,0BAA0B,UAAU,CAAC,EAAE;AACnE;AAEO,SAAS,gBAAgB,WAA2B;AACzD,SAAOC,OAAK,uBAAuB,SAAS,GAAG,cAAc,gBAAgB;AAC/E;AAEA,SAAS,wBAAwB,OAAwC;AACvE,MAAI,OAAO,SAAU,YAAY,UAA
U,KAAM,QAAO;AAExD,MAAM,MAAM,OACN,MAAuB,CAAC;AA4B9B,MA1BI,OAAO,IAAI,WAAY,YAAY,IAAI,QAAQ,KAAK,EAAE,SAAS,MACjE,IAAI,UAAU,IAAI,QAAQ,KAAK,IAG7B,MAAM,QAAQ,IAAI,YAAY,MAChC,IAAI,eAAe,IAAI,aAAa;AAAA,IAClC,CAAC,MAAmB,OAAO,KAAM,YAAY,EAAE,KAAK,EAAE,SAAS;AAAA,EACjE,IAGE,OAAO,IAAI,QAAS,YAAY,IAAI,KAAK,KAAK,EAAE,SAAS,MAC3D,IAAI,OAAO,IAAI,KAAK,KAAK,IAGvB,OAAO,IAAI,QAAS,YAAY,IAAI,KAAK,KAAK,EAAE,SAAS,MAC3D,IAAI,OAAO,IAAI,KAAK,KAAK,IAGvB,OAAO,IAAI,OAAQ,YAAY,IAAI,IAAI,KAAK,EAAE,SAAS,MACzD,IAAI,MAAM,IAAI,IAAI,KAAK,IAGrB,OAAO,IAAI,OAAQ,YAAY,IAAI,IAAI,KAAK,EAAE,SAAS,MACzD,IAAI,MAAM,IAAI,IAAI,KAAK,IAGrB,OAAO,IAAI,eAAgB,YAAY,IAAI,gBAAgB,MAAM;AACnE,QAAM,IAAI,IAAI;AACd,IAAI,OAAO,EAAE,OAAQ,YAAY,OAAO,EAAE,aAAc,YAAY,OAAO,EAAE,cAAe,aAC1F,IAAI,cAAc,EAAE,KAAK,EAAE,KAAK,WAAW,EAAE,WAAW,YAAY,EAAE,WAAW;AAAA,EAErF;AAEA,SAAO;AACT;AAEA,SAAS,qBAAqB,MAAgC;AAC5D,MAAI,OAAO,QAAS,YAAY,SAAS,KAAM,QAAO;AAEtD,MAAM,MAAM;AAEZ,MAAI,IAAI,oBAAoB,UAAa,IAAI,oBAAoB;AAC/D,kBAAO,KAAK,iCAAiC,IAAI,eAAe,cAAc,wBAAwB,GAAG,GAClG;AAGT,MAAM,kBAAkB,IAAI;AAC5B,MAAI,OAAO,mBAAoB,YAAY,oBAAoB;AAC7D,WAAO,cAAc;AAGvB,MAAM,WAA4C,CAAC;AACnD,WAAW,CAAC,SAAS,QAAQ,KAAK,OAAO,QAAQ,eAA0C,GAAG;AAC5F,QAAI,OAAO,WAAY,YAAY,QAAQ,KAAK,EAAE,WAAW,EAAG;AAChE,QAAM,YAAY,wBAAwB,QAAQ;AAClD,IAAI,cACF,SAAS,OAAO,IAAI;AAAA,EAExB;AAEA,SAAO,EAAE,iBAAiB,0BAA0B,SAAS;AAC/D;AAGA,IAAM,iBAAiB,oBAAI,IAA4B;AAEhD,SAAS,wBAAwB,WAA0B;AAChE,EAAI,YACF,eAAe,OAAO,YAAY,SAAS,CAAC,IAE5C,eAAe,MAAM;AAEzB;AAMA,eAAsB,aAAa,WAA4C;AAC7E,MAAM,cAAc,YAAY,SAAS,GACnC,SAAS,eAAe,IAAI,WAAW;AAC7C,MAAI,OAAQ,QAAO;AAEnB,MAAM,eAAe,gBAAgB,SAAS;AAE9C,MAAI;AACF,QAAM,UAAU,MAAM,aAAa,YAAY,GACzC,SAAc,WAAK,OAAO,GAC1B,YAAY,qBAAqB,MAAM;AAC7C,QAAI,CAAC,WAAW;AACd,aAAO,KAAK,gCAAgC,YAAY,oBAAoB;AAC5E,UAAM,QAAQ,EAAE,MAAM,cAAc,UAAU,cAAc,EAAE;AAC9D,4BAAe,IAAI,aAAa,KAAK,GAC9B;AAAA,IACT;AACA,QAAM,SAAS,EAAE,MAAM,cAAc,UAAU,UAAU;AACzD,0BAAe,IAAI,aAAa,MAAM,GAC/B;AAAA,EACT,SAAS,OAAY;AACnB,IAAI,OAAO,OAAO,SAAS,YAAY,OAAO,SAAS,YACrD,OAAO,KAAK,8BAA8B,YAAY,KAAK,KAAK,EAAE;AAEpE,QAAM,QAAQ,EAAE,MAAM,cAAc,UAAU,cAAc,EAAE;AAC9D,0BAAe,IAAI,aAAa,K
AAK,GAC9B;AAAA,EACT;AACF;AAEA,eAAsB,cAAc,QAAuC;AACzE,MAAM,eAAe,OAAO,MACtB,WAAW,OAAO,SAAS,YAAY,CAAC,GAExC,iBAAkD,CAAC;AACzD,WAAW,WAAW,OAAO,KAAK,QAAQ,GAAG;AAC3C,QAAM,MAAM,SAAS,OAAO,GACtB,MAAuB,CAAC;AAE9B,IAAI,IAAI,YAAS,IAAI,UAAU,IAAI,UAC/B,IAAI,gBAAgB,IAAI,aAAa,SAAS,MAChD,IAAI,eAAeC,sBAAqB,IAAI,YAAY,IAEtD,IAAI,gBAAa,IAAI,cAAc,IAAI,cACvC,IAAI,SAAM,IAAI,OAAO,IAAI,OACzB,IAAI,SAAM,IAAI,OAAO,IAAI,OACzB,IAAI,QAAK,IAAI,MAAM,IAAI,MACvB,IAAI,QAAK,IAAI,MAAM,IAAI,MAE3B,eAAe,OAAO,IAAI;AAAA,EAC5B;AAEA,QAAM,UAAUC,SAAQ,YAAY,CAAC;AAErC,MAAM,OAAY;AAAA,IAChB,EAAE,iBAAiB,0BAA0B,UAAU,eAAe;AAAA,IACtE,EAAE,WAAW,KAAK,UAAU,GAAK;AAAA,EACnC,GAEM,aAAa,GAAG,eAAe;AAAA;AAAA,EAAO,IAAI,IAC1C,WAAW,GAAG,YAAY;AAChC,MAAI;AACF,UAAMC,IAAG,UAAU,UAAU,YAAY,MAAM,GAC/C,MAAMA,IAAG,OAAO,UAAU,YAAY,GAEtC,wBAAwBD,SAAQA,SAAQ,YAAY,CAAC,CAAC;AAAA,EACxD,SAAS,OAAO;AACd,QAAI;AAAE,YAAMC,IAAG,OAAO,QAAQ;AAAA,IAAG,QAAQ;AAAA,IAA6B;AACtE,UAAM,IAAI,gBAAgB,6BAA6B,YAAY,IAAI,EAAE,MAAM,cAAc,MAAM,CAAC;AAAA,EACtG;AACF;AAEA,eAAsB,oBAAoB,WAAmB,aAAoC;AAC/F,MAAM,SAAS,MAAM,aAAa,SAAS;AAC3C,MAAK,OAAO,SAAS,SAAS,WAAW,GAEzC;AAAA,WAAO,OAAO,SAAS,SAAS,WAAW;AAG3C,aAAW,SAAS,OAAO,OAAO,OAAO,SAAS,QAAQ;AACxD,MAAI,MAAM,iBACR,MAAM,eAAe,MAAM,aAAa,OAAO,OAAK,MAAM,WAAW,GACjE,MAAM,aAAa,WAAW,KAAG,OAAO,MAAM;AAItD,UAAM,cAAc,MAAM;AAAA;AAC5B;;;AEhMA,SAAe,YAAAC,kBAAgB;;;ACIxB,SAAS,uBACd,iBACA,aACA,UACA,QACmB;AACnB,MAAM,SAA4B;AAAA,IAChC,SAAS,gBAAgB;AAAA,IACzB,gBAAgB,gBAAgB;AAAA,IAChC,cAAc,gBAAgB;AAAA,IAC9B,WAAW,gBAAgB,UAAU,IAAI,CAAC,OAAY;AAAA,MACpD,YAAY,EAAE;AAAA,MACd,UAAU;AAAA,QACR,EAAE,aAAa,EAAE,QAAQ,UAAU,GAAG,QAAQ,GAAK;AAAA,QACnD,GAAG,EAAE,OAAO,IAAI,CAAC,WAAmB;AAAA,UAClC,aAAa;AAAA,UACb,UAAU;AAAA,UACV,QAAQ;AAAA,QACV,EAAE;AAAA,MACJ;AAAA,MACA,SAAS,eAAe,EAAE,IAAI,KAAK,EAAE,MAAM,eAAe,EAAE,OAAO,KAAK,IAAI,CAAC;AAAA,IAC/E,EAAE;AAAA,IACF,QAAQ,gBAAgB;AAAA,IACxB,aAAa,gBAAgB;AAAA,IAC7B,aAAa,gBAAgB;AAAA,EAC/B;AAGA,SAAI,OAAO,iBAAiB,KAC1B,OAAO;AAAA,IACL,aAAa,OAAO,cAAc,cAAc,WAAW,gBAAgB,QAAQ,MAClF,SAAS,eAAe,WAAW,OAAO,YAAY;AAAA,EACzD,GAGK;AACT;AAKO,SAAS,oBAAuC;AACrD,SAAO;AAAA,IACL
,SAAS;AAAA,IACT,gBAAgB;AAAA,IAChB,cAAc;AAAA,IACd,WAAW,CAAC;AAAA,IACZ,QAAQ,CAAC;AAAA,IACT,aAAa,CAAC;AAAA,IACd,aAAa,CAAC;AAAA,IACd,YAAY;AAAA,IACZ,gBAAgB,CAAC;AAAA,EACnB;AACF;;;AC3DO,SAAS,mBAAmB,UAAoB,KAAqB;AAC1E,MAAM,QAAgB,CAAC,GAEjB,oBAAoB,qBAAqB,GAAG;AAClD,EAAI,qBAAqB,kBAAkB,SAAS,KAClD,MAAM,KAAK,GAAG,iBAAiB;AAGjC,MAAM,aAAa,sBAAsB,UAAU,GAAG;AACtD,SAAI,WAAW,UAAU,WAAW,OAAO,SAAS,KAClD,MAAM,KAAK,GAAG,WAAW,MAAM,GAG1B;AACT;;;ACXO,SAAS,aAAa,WAAuC;AAClE,MAAI,UAAU,WAAW,GAEzB;AAAA,WAAO,KAAK,YAAY,UAAU,MAAM,gCAAgC;AACxE,aAAW,YAAY,WAAW;AAChC,UAAM,SAAS,SAAS,SAAS,KAAK,OAAK,EAAE,MAAM,GAC7C,QAAQ,SAAS,SAAS,KAAK,OAAK,CAAC,EAAE,MAAM;AACnD,aAAO;AAAA,QACL,KAAK,YAAY,SAAS,UAAU,CAAC,KAAK,QAAQ,WAAW,cAAc,QAAQ,QAAQ,gBAAgB,OAAO,WAAW;AAAA,MAC/H;AAAA,IACF;AAAA;AACF;AAOO,SAAS,UAAU,QAAkC;AAC1D,MAAI,OAAO,WAAW,GAEtB;AAAA,WAAO,MAAM,eAAe,OAAO,MAAM,6BAA6B;AACtE,aAAW,SAAS;AAClB,aAAO,MAAM,KAAK,MAAM,UAAU,KAAK,MAAM,OAAO,EAAE;AAAA;AAE1D;AASO,SAAS,oBAAoB,WAA2B;AAC7D,MAAI,UAAU,WAAW,GAEzB;AAAA,WAAO,KAAK,YAAY,UAAU,MAAM,iCAAiC;AACzE,aAAW,YAAY;AACrB,aAAO,KAAK,KAAK,QAAQ,EAAE;AAAA;AAE/B;AASO,SAAS,iBAAiB,QAAwB;AACvD,MAAI,OAAO,WAAW,GAEtB;AAAA,WAAO,MAAM,eAAe,OAAO,MAAM,8BAA8B;AACvE,aAAW,SAAS;AAClB,aAAO,MAAM,KAAK,KAAK,EAAE;AAAA;AAE7B;AAYO,SAAS,sBACd,QACA,aACA,UACA,QACM;AACN,EAAI,OAAO,iBAAiB,KAC1B,OAAO;AAAA,IACL,aAAa,OAAO,cAAc,cAAc,WAAW,gBAAgB,QAAQ,MAClF,SAAS,eAAe,WAAW,OAAO,YAAY;AAAA,EACzD,GAGF,aAAa,OAAO,SAAS,GAC7B,UAAU,OAAO,MAAM;AACzB;;;AC7FA,SAAS,QAAAC,QAAM,WAAAC,UAAS,YAAAC,YAAU,YAAAC,WAAU,WAAAC,gBAAe;;;ACO3D,SAAS,YAAYC,WAAU;AAC/B,OAAO,YAAY;AACnB,OAAOC,WAAU;AACjB,OAAOC,WAAU;AACjB,YAAYC,WAAU;AACtB,SAAS,SAAS,kBAAkB;AACpC,SAAS,gBAAgB;;;ACfzB,SAAS,eAAe;AAMxB,eAAsB,kBAAkB,SAAkC;AACxE,MAAI;AACF,WAAO,MAAM,QAAQ,OAAO;AAAA,EAC9B,SAAS,OAAO;AACd,kBAAO,KAAK,yCAAyC,KAAK,EAAE,GAE3C,GAAG,QAAQ,MAAM,IAAI,QAAQ,OAAO,CAAC,CAAC,IAAI,QAAQ,OAAO,QAAQ,SAAS,CAAC,CAAC;AAAA,EAE/F;AACF;;;ACJO,SAAS,4BACd,kBACA,gBACA,KACQ;AACR,MAAI;AACF,QAAM,UAAU,iBAAiB,KAAK;AACtC,QAAI,CAAC,QAAQ,SAAS,cAAc,QAAQ,KAAK,CAAC,QAAQ,WAAW,KAAK;AAExE,aAAO;AAGT,QAAM,EAAE,aAAa,MAA
M,eAAe,IAAI,iBAAiB,gBAAgB,GACzE,EAAE,QAAQ,oBAAoB,IAAI,+BAA+B,aAAa,GAAG,GACjF,WAAW,oBAAoB,IAAI,cAAc,KAAK,CAAC,GAEvD,aAAa,UAAU,QAAQ,QAAQ;AAS7C,WALI,mBAAmB,UAAa,cAAc,aAAa,UAAU,KAKrE,mBAAmB,UAAa,OAAO,KAAK,UAAU,EAAE,WAAW,IAC9D,mBAGF,gBAAgB,YAAY,IAAI;AAAA,EACzC,QAAQ;AACN,WAAO;AAAA,EACT;AACF;;;ACkMO,SAASC,gBAAe,KAAUC,OAAmB;AAC1D,MAAI,CAACA;AACH,WAAO;AAGT,MAAM,OAAOA,MAAK,MAAM,GAAG,GACvB,UAAU;AAEd,WAAW,OAAO;AAChB,QAAI,WAAW,OAAO,WAAY,YAAY,OAAO;AACnD,gBAAU,QAAQ,GAAG;AAAA;AAErB;AAIJ,SAAO;AACT;AASO,SAASC,gBAAe,KAAUD,OAAc,OAAkB;AACvE,MAAI,CAACA;AACH;AAGF,MAAM,OAAOA,MAAK,MAAM,GAAG,GACvB,UAAU;AAGd,WAAS,IAAI,GAAG,IAAI,KAAK,SAAS,GAAG,KAAK;AACxC,QAAM,MAAM,KAAK,CAAC;AAElB,KAAI,EAAE,OAAO,YAAY,OAAO,QAAQ,GAAG,KAAM,cAC/C,QAAQ,GAAG,IAAI,CAAC,IAGlB,UAAU,QAAQ,GAAG;AAAA,EACvB;AAGA,MAAM,WAAW,KAAK,KAAK,SAAS,CAAC;AACrC,UAAQ,QAAQ,IAAI;AACtB;AAKO,SAASE,mBAAkB,KAAUF,OAAoB;AAC9D,MAAI,CAACA;AACH;AAGF,MAAM,OAAOA,MAAK,MAAM,GAAG,GACvB,UAAU;AAGd,WAAS,IAAI,GAAG,IAAI,KAAK,SAAS,GAAG,KAAK;AACxC,QAAM,MAAM,KAAK,CAAC;AAElB,QAAI,EAAE,OAAO,YAAY,OAAO,QAAQ,GAAG,KAAM;AAC/C;AAGF,cAAU,QAAQ,GAAG;AAAA,EACvB;AAGA,MAAM,WAAW,KAAK,KAAK,SAAS,CAAC;AACrC,SAAO,QAAQ,QAAQ;AACzB;;;AC3SO,SAAS,eAAe,MAAW,SAAiB,IAAc;AACvE,MAAI,OAAO,QAAS,YAAY,SAAS;AACvC,WAAO,SAAS,CAAC,MAAM,IAAI,CAAC;AAI9B,MAAI,MAAM,QAAQ,IAAI;AACpB,WAAO,SAAS,CAAC,MAAM,IAAI,CAAC;AAG9B,MAAM,OAAiB,CAAC;AACxB,WAAW,OAAO,OAAO,KAAK,IAAI,GAAG;AACnC,QAAM,WAAW,SAAS,GAAG,MAAM,IAAI,GAAG,KAAK;AAE/C,IAAI,OAAO,KAAK,GAAG,KAAM,YAAY,KAAK,GAAG,MAAM,QAAQ,CAAC,MAAM,QAAQ,KAAK,GAAG,CAAC,IAEjF,KAAK,KAAK,GAAG,eAAe,KAAK,GAAG,GAAG,QAAQ,CAAC,IAGhD,KAAK,KAAK,QAAQ;AAAA,EAEtB;AAEA,SAAO;AACT;AAcO,SAAS,gBAAgB,KAAU,SAAuB;AAC/D,MAAM,QAAQ,QAAQ,MAAM,GAAG,GACzB,OAAO,MAAM,IAAI,GAEnB,UAAU,KACRG,QAAc,CAAC,GAAG;AAExB,WAAW,QAAQ,OAAO;AACxB,QAAI,CAAC,QAAQ,IAAI,KAAK,OAAO,QAAQ,IAAI,KAAM;AAC7C;AAEF,cAAU,QAAQ,IAAI,GACtBA,MAAK,KAAK,OAAO;AAAA,EACnB;AAEA,SAAO,QAAQ,IAAI;AAGnB,WAAS,IAAIA,MAAK,SAAS,GAAG,IAAI,GAAG,KAAK;AACxC,QAAM,SAASA,MAAK,CAAC;AACrB,QAAI,OAAO,KAAK,MAAM,EAAE,WAAW,GAAG;AAEpC,UAAM,cAAcA,MAAK,IAAI,CAAC,GACxB,mB
AAmB,MAAM,IAAI,CAAC;AACpC,MAAI,oBACF,OAAO,YAAY,gBAAgB;AAAA,IAEvC;AACE;AAAA,EAEJ;AACF;AAQO,SAAS,mBAAmB,MAAoB;AACrD,MAAI,QAAS,KAA4B,QAAO;AAChD,MAAI,OAAO,QAAS,SAAU,QAAO;AACrC,MAAI,MAAM,QAAQ,IAAI,EAAG,QAAO,KAAK,WAAW;AAEhD,MAAM,OAAO,OAAO,KAAK,IAAI;AAC7B,SAAI,KAAK,WAAW,IAAU,KAGvB,KAAK,MAAM,SAAO,mBAAmB,KAAK,GAAG,CAAC,CAAC;AACxD;;;ACnGA,SAAS,YAAYC,WAAU;AAC/B,OAAOC,WAAU;AACjB,SAAS,aAAAC,kBAAiB;AAsCnB,IAAM,wBAAN,MAA4B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWjC,MAAM,QACJ,SACA,SACiC;AAEjC,WAAI,MAAM,QAAQ,OAAO,IAChB,KAAK,oBAAoB,SAAS,OAAO,IAK3C;AAAA,MACL,OAFY,MAAM,KAAK,qBAAqB,SAAS,QAAQ,OAAO;AAAA,MAGpE,UAAU,CAAC;AAAA,IACb;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYA,MAAc,oBACZ,UACA,SACiC;AACjC,QAAI,SAAS,WAAW;AACtB,aAAO,EAAE,OAAO,CAAC,GAAG,UAAU,CAAC,8BAA8B,EAAE;AAGjE,QAAM,WAAqB,CAAC,GACtB,kBAA4B,CAAC;AAGnC,aAAS,IAAI,GAAG,IAAI,SAAS,QAAQ,KAAK;AACxC,UAAM,iBAAiB,SAAS,CAAC,GAC3B,UAAU,MAAM,KAAK,qBAAqB,gBAAgB,QAAQ,OAAO;AAE/E,UAAI,QAAQ,SAAS,GAAG;AAEtB,iBAAS,IAAI,IAAI,GAAG,IAAI,SAAS,QAAQ,KAAK;AAC5C,cAAM,uBAAuB,SAAS,CAAC,GACjC,eAAe,MAAM,KAAK;AAAA,YAC9B;AAAA,YACA,QAAQ;AAAA,UACV;AAEA,cAAI,aAAa,SAAS,GAAG;AAC3B,gBAAM,UAAU,YAAY,cAAc,uBAAuB,IAAI,CAAC,uCAChC,oBAAoB,eAAe,IAAI,CAAC,wBACtD,aAAa,MAAM;AAE3C,qBAAS,KAAK,OAAO,GACrB,gBAAgB,KAAK,oBAAoB,GAErC,QAAQ,gBAAgB,MAC1B,OAAO,MAAM,OAAO;AAAA,UAExB;AAAA,QACF;AAEA,eAAO;AAAA,UACL,OAAO;AAAA,UACP;AAAA,UACA,gBAAgB;AAAA,UAChB,iBAAiB,gBAAgB,SAAS,IAAI,kBAAkB;AAAA,QAClE;AAAA,MACF;AAAA,IACF;AAGA,WAAO;AAAA,MACL,OAAO,CAAC;AAAA,MACR,UAAU,CAAC,+BAA+B,SAAS,MAAM,gBAAgB,SAAS,KAAK,IAAI,CAAC,EAAE;AAAA,IAChG;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWA,MAAc,qBAAqB,SAAiB,SAAoC;AAEtF,QAAI,KAAK,cAAc,OAAO;AAC5B,aAAO,KAAK,mBAAmB,SAAS,OAAO;AAIjD,QAAM,WAAWC,MAAK,KAAK,SAAS,OAAO;AAG3C,WAFe,MAAc,OAAO,QAAQ,IAE5B,CAAC,QAAQ,IAAI,CAAC;AAAA,EAChC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQQ,cAAc,SAA0B;AAC9C,WAAO,QAAQ,SAAS,GAAG,KAAK,QAAQ,SAAS,GAAG,KAAK,QAAQ,SAAS,GAAG;AAAA,EAC/E;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,MAAc,mBAAmB,
SAAiB,SAAoC;AACpF,QAAM,UAAoB,CAAC,GAGrB,QAAQ,QAAQ,MAAM,GAAG,GACzB,WAAW,MAAM,UAAU,OAAK,KAAK,cAAc,CAAC,CAAC;AAE3D,QAAI,aAAa;AAEf,aAAO,CAACA,MAAK,KAAK,SAAS,OAAO,CAAC;AAIrC,QAAM,UAAUA,MAAK,KAAK,SAAS,GAAG,MAAM,MAAM,GAAG,QAAQ,CAAC,GACxD,cAAc,MAAM,MAAM,QAAQ,EAAE,KAAK,GAAG;AAGlD,WAAK,MAAc,OAAO,OAAO,KAKjC,MAAM,KAAK,kBAAkB,SAAS,aAAa,SAAS,OAAO,GAE5D,WANE,CAAC;AAAA,EAOZ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYA,MAAc,kBACZ,KACA,SACA,SACA,SACe;AACf,QAAI;AACF,UAAM,UAAU,MAAMC,IAAG,QAAQ,KAAK,EAAE,eAAe,GAAK,CAAC;AAE7D,eAAW,SAAS,SAAS;AAC3B,YAAM,WAAWD,MAAK,KAAK,KAAK,MAAM,IAAI,GACpC,eAAeA,MAAK,SAAS,SAAS,QAAQ;AAEpD,QAAI,MAAM,YAAY,IAEhB,QAAQ,WAAW,IAAI,KAAK,QAAQ,SAAS,MAAM,IACrD,MAAM,KAAK,kBAAkB,UAAU,SAAS,SAAS,OAAO,IACvD,QAAQ,SAAS,GAAG,KAE7B,MAAM,KAAK,kBAAkB,UAAU,SAAS,SAAS,OAAO,IAEzD,MAAM,OAAO,KAElBE,WAAU,cAAc,SAAS,EAAE,KAAK,GAAM,CAAC,KACjD,QAAQ,KAAK,QAAQ;AAAA,MAG3B;AAAA,IACF,SAAS,OAAO;AAEd,aAAO,MAAM,2BAA2B,GAAG,KAAK,KAAK,EAAE;AAAA,IACzD;AAAA,EACF;AACF;;;ACnOO,SAAS,iCACd,MACA,QACA,UACU;AACV,MAAM,WAAqB,CAAC;AAC5B,MAAI,CAAC,OAAO,WAAY,QAAO;AAE/B,WAAW,CAAC,KAAK,UAAU,KAAK,OAAO,QAAQ,OAAO,UAAU,GAAG;AACjE,QAAI,EAAE,OAAO,MAAO;AACpB,QAAM,UAAU,eAAe,KAAK,GAAG,GAAG,YAAY,KAAK,QAAQ;AACnE,IAAI,WACF,SAAS,KAAK,OAAO;AAAA,EAEzB;AACA,SAAO;AACT;AAKA,SAAS,eACP,OACA,YACA,WACA,UACe;AACf,MAAI,CAAC,WAAW,KAAM,QAAO;AAE7B,MAAM,gBAAgB,MAAM,QAAQ,WAAW,IAAI,IAC/C,WAAW,OACX,CAAC,WAAW,IAAI,GAEd,aAAa,kBAAkB,KAAK;AAE1C,SAAK,cAAc,SAAS,UAAU,IAG/B,OAFE,GAAG,QAAQ,MAAM,SAAS,QAAQ,UAAU,cAAc,cAAc,KAAK,KAAK,CAAC;AAG9F;AAKA,SAAS,kBAAkB,OAAoB;AAC7C,SAAI,UAAU,OAAa,SACvB,MAAM,QAAQ,KAAK,IAAU,UAC1B,OAAO;AAChB;;;ANEO,SAAS,kBACd,MACA,YACA,YACA,SACS;AAOT,MALI,KAAK,OAAO,KAAK,QAAQ,KAAK,QAAQ,KAAK,QAAQ,KAAK,SAKxD,KAAK,UACH,KAAK,UAAU,UAAU,KAAK,UAAU;AAC1C,WAAO;AAMX,MAAM,YAAYC,MAAK,QAAQ,UAAU,EAAE,YAAY,GACjD,YAAYA,MAAK,QAAQ,UAAU,EAAE,YAAY;AAMvD,SALI,gBAAc,aAAa,6BAA6B,IAAI,SAAS,KAKrE,oBAAoB,IAAI,SAAS,KAAK,QAAQ,YAAY,QAAQ,cAAc;AAKtF;AAKO,IAAM,sBAAN,MAAkD;AAAA,EAIvD,YAAY,mBAAuC;AACjD,SAAK,oBAAoB,qBAAqB,0BAC9C,KAAK,iBAAiB,IAAI,sBAAsB;AAAA,E
AClD;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,YAAY,MAAY,SAA2C;AACvE,QAAM,YAAY,KAAK,IAAI;AAC3B,QAAI;AAEF,UAAI,eAAe,KAAK;AACxB,UAAI,mBAAmB,KAAK,IAAI;AAC9B,YAAI;AACF,yBAAe,wBAAwB,KAAK,MAAiC,OAAO;AAAA,QACtF,SAAS,OAAO;AACd,iBAAO;AAAA,YACL,QAAQ;AAAA,YACR,QAAQ,KAAK,mBAAmB,KAAK,EAAE;AAAA,YACvC,SAAS;AAAA,YACT,aAAa;AAAA,YACb,OAAO,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC;AAAA,YAC/D,eAAe,KAAK,IAAI,IAAI;AAAA,UAC9B;AAAA,QACF;AAIF,UAAI,aAAa,KAAK;AACtB,UAAI,mBAAmB,KAAK,EAAE;AAC5B,YAAI;AACF,uBAAa,wBAAwB,KAAK,IAAwB,OAAO;AAAA,QAC3E,SAAS,OAAO;AACd,iBAAO;AAAA,YACL,QAAQ,KAAK,qBAAqB,YAAY;AAAA,YAC9C,QAAQ;AAAA,YACR,SAAS;AAAA,YACT,aAAa;AAAA,YACb,OAAO,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC;AAAA,YAC/D,eAAe,KAAK,IAAI,IAAI;AAAA,UAC9B;AAAA,QACF;AAIF,UAAI,OAAO,cAAe,UAAU;AAClC,YAAMC,WAAU,MAAM,KAAK,mBAAmB,EAAE,GAAG,MAAM,MAAM,cAAc,IAAI,WAAW,GAAG,OAAO;AAEtG,eAAO,KAAK,iBAAiBA,UAAS,SAAS;AAAA,MACjD;AAGA,UAAM,aAAa,KAAK,aAAa,EAAE,GAAG,MAAM,MAAM,cAAc,IAAI,WAAW,CAAC;AACpF,UAAI,CAAC,WAAW;AACd,eAAO;AAAA,UACL,QAAQ,KAAK,qBAAqB,YAAY;AAAA,UAC9C,QAAQ;AAAA,UACR,SAAS;AAAA,UACT,aAAa;AAAA,UACb,OAAO,IAAI,MAAM,iBAAiB,WAAW,OAAO,IAAI,OAAK,EAAE,OAAO,EAAE,KAAK,IAAI,CAAC,EAAE;AAAA,UACpF,eAAe,KAAK,IAAI,IAAI;AAAA,QAC9B;AAIF,UAAI,KAAK,QAAQ,CAAC,KAAK,kBAAkB,KAAK,MAAM,OAAO,GAAG;AAC5D,YAAM,aAAa,KAAK,qBAAqB,YAAY;AACzD,sBAAO,MAAM,kCAAkC,UAAU,OAAO,UAAU,EAAE,GACrE;AAAA,UACL,QAAQ;AAAA,UACR,QAAQ;AAAA,UACR,SAAS;AAAA,UACT,aAAa;AAAA,UACb,UAAU,CAAC,+BAA+B;AAAA,UAC1C,eAAe,KAAK,IAAI,IAAI;AAAA,QAC9B;AAAA,MACF;AAGA,UAAM,aAAa,MAAM,KAAK,qBAAqB,cAAc,OAAO,GAClE,cAAc,WAAW,OACzB,qBAAqB,WAAW;AAGtC,UAAI,YAAY,WAAW;AACzB,eAAO;AAAA,UACL,QAAQ,KAAK,qBAAqB,YAAY;AAAA,UAC9C,QAAQ;AAAA,UACR,SAAS;AAAA,UACT,aAAa;AAAA,UACb,UAAU,mBAAmB,SAAS,IAAI,qBAAqB,CAAC,0BAA0B;AAAA,UAC1F,eAAe,KAAK,IAAI,IAAI;AAAA,QAC9B;AAIF,UAAM,UAAwB,CAAC,GACzB,mBAAmB,KAAK,gBAAgB,YAAY;AAE1D,eAAW,cAAc,aAAa;AACpC,YAAM,aAAa,KAAK,sBAAsB,YAAY,kBAAkB,YAAsB,OAAO,GACnG,SAAS,MAAM,KAAK,gBAAgB,MAAM,YAAY,YAAY,OAAO;AAC/E,gBAAQ,KAAK;AAAA,UACX,GAAG;AAAA,UACH,eAAe,KAAK,IAAI,IAAI;AAAA,QAC9B,CAAC;AAAA,MACH;AAWA,aARI,mBAA
mB,SAAS,KAAK,QAAQ,SAAS,MACpD,QAAQ,CAAC,EAAE,WAAW;AAAA,QACpB,GAAI,QAAQ,CAAC,EAAE,YAAY,CAAC;AAAA,QAC5B,GAAG;AAAA,MACL,IAIE,QAAQ,WAAW,IACd,QAAQ,CAAC,IAIX,KAAK,iBAAiB,SAAS,SAAS;AAAA,IACjD,SAAS,OAAO;AACd,aAAO;AAAA,QACL,QAAQ,mBAAmB,KAAK,IAAI,IAAI,aAAa,KAAK,qBAAqB,KAAK,IAAI;AAAA,QACxF,QAAQ,KAAK,mBAAmB,KAAK,EAAE;AAAA,QACvC,SAAS;AAAA,QACT,aAAa;AAAA,QACb,OAAO,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC;AAAA,QAC/D,eAAe,KAAK,IAAI,IAAI;AAAA,MAC9B;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,aAAa,OAAe,SAA6C;AAC7E,QAAM,UAAwB,CAAC;AAE/B,aAAW,QAAQ,OAAO;AACxB,UAAM,SAAS,MAAM,KAAK,YAAY,MAAM,OAAO;AACnD,cAAQ,KAAK,MAAM;AAAA,IACrB;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,mBAAmB,MAAY,SAA6C;AAChF,QAAI,OAAO,KAAK,MAAO;AACrB,YAAM,IAAI,MAAM,iCAAiC;AAGnD,QAAM,cAAc,KAAK,IACnB,iBAAiB,KAAK,qBAAqB,KAAK,IAAI,GAGpD,aAAa,MAAM,KAAK,qBAAqB,KAAK,MAAM,OAAO,GAC/D,cAAc,WAAW;AAG/B,QAAI,YAAY,WAAW;AACzB,aAAO,OAAO,KAAK,WAAW,EAAE,IAAI,aAAW;AAAA,QAC7C,QAAQ;AAAA,QACR;AAAA,QACA,SAAS;AAAA,QACT,aAAa;AAAA,QACb,UAAU,WAAW,SAAS,SAAS,IAAI,WAAW,WAAW,CAAC,0BAA0B;AAAA,MAC9F,EAAE;AAIJ,QAAM,aAA2B,CAAC,GAC5B,mBAAmB,KAAK,gBAAgB,KAAK,IAAI;AAEvD,aAAW,cAAc,aAAa;AAEpC,UAAM,gBAAgB,MAAM,KAAK,eAAe,YAAY,OAAO;AAGnE,eAAW,CAAC,YAAY,UAAU,KAAK,OAAO,QAAQ,WAAW,GAAG;AAClE,YAAM,YAAY,KAAK,IAAI;AAE3B,YAAI;AAEF,cAAM,aAAmB;AAAA,YACvB,GAAG;AAAA,YACH,GAAG;AAAA,YACH,MAAM,KAAK;AAAA,YACX,IAAI;AAAA,UACN;AAGA,cAAI,WAAW,QAAQ,CAAC,KAAK,kBAAkB,WAAW,MAAM,OAAO,GAAG;AACxE,mBAAO,MAAM,+CAA+C,cAAc,OAAO,UAAU,EAAE,GAC7F,WAAW,KAAK;AAAA,cACd,QAAQ;AAAA,cACR,QAAQ;AAAA,cACR,SAAS;AAAA,cACT,aAAa;AAAA,cACb,UAAU,CAAC,+BAA+B;AAAA,cAC1C,eAAe,KAAK,IAAI,IAAI;AAAA,YAC9B,CAAC;AACD;AAAA,UACF;AAEA,cAAM,qBAAqB,KAAK,sBAAsB,YAAY,kBAAkB,YAAY,OAAO,GAGjG,SAAS,MAAM,KAAK;AAAA,YACxB;AAAA,YACA;AAAA,YACA;AAAA,YACA;AAAA,YACA;AAAA,UACF;AAEA,qBAAW,KAAK;AAAA,YACd,GAAG;AAAA,YACH,eAAe,KAAK,IAAI,IAAI;AAAA,UAC9B,CAAC;AAAA,QACH,SAAS,OAAO;AACd,qBAAW,KAAK;AAAA,YACd,QAAQ;AAAA,YACR,QAAQ;AAAA,YACR,SAAS;AAAA,YACT,aAAa;AAAA,YACb,OAAO,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC;AAAA,YAC/D,eAAe,KAAK,IAAI
,IAAI;AAAA,UAC9B,CAAC;AAAA,QACH;AAAA,MACF;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,aAAa,MAA8B;AACzC,QAAM,SAA4B,CAAC;AAYnC,QATK,KAAK,QACR,OAAO,KAAK,EAAE,SAAS,sCAAsC,MAAM,eAAe,CAAC,GAGhF,KAAK,MACR,OAAO,KAAK,EAAE,SAAS,oCAAoC,MAAM,aAAa,CAAC,GAI7E,KAAK,MAAM,mBAAmB,KAAK,EAAE,GAAG;AAC1C,UAAM,mBAAmB,yBAAyB,KAAK,EAAsB;AAC7E,UAAI,CAAC,iBAAiB;AACpB,iBAAW,SAAS,iBAAiB;AACnC,iBAAO,KAAK;AAAA,YACV,SAAS;AAAA,YACT,MAAM;AAAA,UACR,CAAC;AAAA,IAGP;AAgBA,QAbI,KAAK,QAAQ,KAAK,QACpB,OAAO,KAAK,EAAE,SAAS,2CAA2C,MAAM,sBAAsB,CAAC,GAI7F,KAAK,SAAS,CAAC,CAAC,QAAQ,WAAW,WAAW,WAAW,EAAE,SAAS,KAAK,KAAK,KAChF,OAAO,KAAK;AAAA,MACV,SAAS,2BAA2B,KAAK,KAAK;AAAA,MAC9C,MAAM;AAAA,IACR,CAAC,GAIC,KAAK;AACP,UAAI;AAEF,iBAAS,EAAE,MAAM,KAAK,MAAM,MAAM,CAAC,EAAE,CAAC;AAAA,MACxC,QAAgB;AACd,eAAO,KAAK;AAAA,UACV,SAAS,gCAAgC,KAAK,IAAI;AAAA,UAClD,MAAM;AAAA,QACR,CAAC;AAAA,MACH;AAIF,QAAI,KAAK,KAAK;AACZ,UAAM,wBAAwB,oBAAoB,KAAK,GAAG;AAC1D,UAAI,CAAC,sBAAsB;AACzB,iBAAW,SAAS,sBAAsB;AACxC,iBAAO,KAAK;AAAA,YACV,SAAS;AAAA,YACT,MAAM;AAAA,UACR,CAAC;AAAA,IAGP;AAEA,WAAO;AAAA,MACL,OAAO,OAAO,WAAW;AAAA,MACzB;AAAA,MACA,UAAU,CAAC;AAAA,IACb;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,gBACZ,MACA,YACA,YACA,SAC4C;AAE5C,QAAI,CAAC,KAAK,aAAa,MAAM,YAAY,YAAY,OAAO;AAC1D,aAAO,MAAM,KAAK,uBAAuB,MAAM,YAAY,YAAY,OAAO;AAIhF,QAAI;AACJ,QAAI,CAAC,QAAQ,QAAQ;AACnB,UAAM,mBAAmB,MAAc,aAAa,UAAU;AAC9D,0BAAoB,MAAM,kBAAkB,gBAAgB;AAAA,IAC9D;AAGA,QAAM,gBAAgB,MAAM,KAAK,eAAe,YAAY,OAAO;AAGnE,WAAO,EAAE,GADM,MAAM,KAAK,2BAA2B,MAAM,eAAe,YAAY,YAAY,OAAO,GACrF,kBAAkB;AAAA,EACxC;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,2BACZ,MACA,eACA,YACA,YACA,SAC4C;AAC5C,QAAM,WAAqB,CAAC,GACtB,YAA4B,CAAC,GAE/B,OAAO,cAAc,MACrB,cAAc;AAGlB,QAAI,KAAK,cAAc;AACrB,UAAM,SAAS,eAAe,WAAW,KAAK,YAAY;AAC1D,UAAI,QAAQ;AACV,YAAM,iBAAkB,QAAQ,OAAO,QAAS,YAAY,iBAAiB,OACzE,KAAK,cACJ,OAAO,QAAS,YAAY,SAAS,OAAO,OAAO;AAExD,YAAI,gBAAgB;AAClB,cAAM,iBAAiB;AAAA,YACrB;AAAA,YACA;AAAA,YACAD,MAAK,SAAS,QAAQ,aAAa,UAAU;AAAA,UAC/C;AACA,mBAAS,KAAK,GAAG,cAAc;AAAA,QACjC;AAAA,MACF;AAAA,IACF;AAEA,QAAI;AAEF,MAAI,KAAK,SACP,OAAO,KAAK,gBAAgB,MAAM,KAAK
,IAAI,GAC3C,cAAc,KAIZ,KAAK,QACP,OAAO,KAAK,SAAS,MAAM,KAAK,IAAI,GACpC,cAAc,MACL,KAAK,SACd,OAAO,KAAK,SAAS,MAAM,KAAK,IAAI,GACpC,cAAc;AAMhB,UAAI,iBACA,YAAmB,CAAC,GACpB,UAAiB,CAAC;AAEtB,UAAI,KAAK,KAAK;AACZ,YAAM,QAAQ,iBAAiB,KAAK,GAAG;AAKvC,YAJA,YAAY,MAAM,WAClB,UAAU,MAAM,SAGZ,UAAU,SAAS,GAAG;AACxB,cAAM,aAAa,iBAAiB;AAAA,YAClC,UAAUA,MAAK,SAAS,YAAYA,MAAK,QAAQ,UAAU,CAAC;AAAA,YAC5D,SAASA,MAAK,SAASA,MAAK,QAAQ,UAAU,CAAC;AAAA,YAC/C,MAAMA,MAAK,SAAS,QAAQ,aAAa,UAAU;AAAA,YACnD,KAAKA,MAAK,QAAQ,UAAU;AAAA,UAC9B,CAAC,GAKK,eAAe,KAAK,aAAa,YAAY,EAAE,GAC/C,mBAAmB,iBAAiB,cAAc,iBAAiB;AACzE,UAAI,QAAQ,OAAO,QAAS,YAAY,iBAAiB,QAAQ,mBAC/D,KAAK,cAAc;AAAA,YACjB,KAAK;AAAA,YACL;AAAA,YACA;AAAA,YACA,KAAK;AAAA,UACP,IAEA,OAAO;AAAA,YACL;AAAA,YACA;AAAA,YACA;AAAA,YACA,KAAK;AAAA,UACP,GAEF,cAAc;AAAA,QAChB;AAAA,MACF;AASA,UAJE,EAAQ,KAAK,SACb,KAAK,UAAU,aACf,KAAK,UAAU,eAEM,OAAO,QAAS,YAAY,SAAS,MAAM;AAEhE,YAAM,gBAAiB,QAAQ,iBAAiB,OAAQ,KAAK,cAAc;AAC3E,QAAI,OAAO,iBAAkB,YAAY,kBAAkB,SACzD,kBAAkB,eAAe,aAAa;AAAA,MAElD;AAEA,UAAM,eAAe,MAAc,OAAO,UAAU;AASpD,UANI,KAAK,UACP,OAAO,KAAK,aAAa,MAAM,KAAK,KAAK,GACzC,cAAc,KAIZ,cAAc;AAChB,YAAM,gBAAgB,MAAM,KAAK,eAAe,YAAY,OAAO;AAGnE,YAAI,KAAK,UAAU,aAAa;AAE9B,cAAM,YAAY,cAAc,KAC1B,YAAY,cAAc;AAChC,iBAAO,gCAAgC,WAAW,QAAQ,aAAa,SAAS,GAChF,cAAc;AAAA,QAChB,OAAO;AAEL,cAAM,cAAc,KAAK;AAAA,YACvB;AAAA,YACA,cAAc;AAAA,YACd,KAAK,SAAS;AAAA,YACd;AAAA,UACF;AACA,iBAAO,YAAY,MACnB,UAAU,KAAK,GAAG,YAAY,SAAS,GACnC,YAAY,UAAU,SAAS,MACjC,cAAc;AAAA,QAElB;AAAA,MACF;AAIA,UAAI,QAAQ,SAAS,GAAG;AACtB,YAAM,aAAa,iBAAiB;AAAA,UAClC,UAAUA,MAAK,SAAS,YAAYA,MAAK,QAAQ,UAAU,CAAC;AAAA,UAC5D,SAASA,MAAK,SAASA,MAAK,QAAQ,UAAU,CAAC;AAAA,UAC/C,MAAMA,MAAK,SAAS,QAAQ,aAAa,UAAU;AAAA,UACnD,KAAKA,MAAK,QAAQ,UAAU;AAAA,QAC9B,CAAC,GAGK,eAAe,KAAK,aAAa,YAAY,EAAE,GAC/C,mBAAmB,iBAAiB,cAAc,iBAAiB;AACzE,QAAI,QAAQ,OAAO,QAAS,YAAY,iBAAiB,QAAQ,mBAC/D,KAAK,cAAc;AAAA,UACjB,KAAK;AAAA,UACL;AAAA,UACA;AAAA,UACA,KAAK;AAAA,QACP,IAEA,OAAO;AAAA,UACL;AAAA,UACA;AAAA,UACA;AAAA,UACA,KAAK;AAAA,QACP,GAEF,cAAc;AAAA,MAChB;AAGA,UAAI;AACJ,aAAK,QAAQ,WACX,cAAc,MAAM,KAAK,gBAAgB,YAAY,MAAM
,cAAc,MAAM,IAG1E;AAAA,QACL,QAAQ;AAAA,QACR,QAAQ;AAAA,QACR,SAAS;AAAA,QACT;AAAA,QACA,MAAM;AAAA,QACN,OAAO,KAAK;AAAA,QACZ;AAAA,QACA,UAAU,SAAS,SAAS,IAAI,WAAW;AAAA,QAC3C,WAAW,UAAU,SAAS,IAAI,YAAY;AAAA,QAC9C,UAAU,KAAK,YAAY,IAAI;AAAA,MACjC;AAAA,IACF,SAAS,OAAO;AACd,aAAO;AAAA,QACL,QAAQ;AAAA,QACR,QAAQ;AAAA,QACR,SAAS;AAAA,QACT;AAAA,QACA,OAAO,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC;AAAA,QAC/D,UAAU,SAAS,SAAS,IAAI,WAAW;AAAA,MAC7C;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,eAAe,UAAkB,SAA+C;AACpF,QAAI,MAAM,MAAc,aAAa,QAAQ,GACvC,SAAS,KAAK,aAAa,UAAU,GAAG;AAG9C,YAAK,WAAW,cAAc,WAAW,SAAS,SAAS,YAAY,SAAS,cAAc,cAC5F,MAAM,4BAA4B,KAAK,QAAQ,UAAU,QAAQ,aAAa,IAKzE,EAAE,MAFI,KAAK,mBAAmB,KAAK,MAAM,GAEjC,QAAQ,IAAI;AAAA,EAC7B;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,gBAAgB,UAAkB,SAAc,cAA2C;AAE/F,QAAM,eAAe,KAAK,aAAa,UAAU,EAAE,GAC7C,aAAa,KAAK,uBAAuB,SAAS,YAAY,GAC9D,OAAO,MAAM,kBAAkB,UAAU;AAG/C,QAAI,MAAc,OAAO,QAAQ;AAC/B,UAAI;AAEF,YADiB,MAAc,aAAa,UAAU,MAAM,MAC3C;AACf,iBAAO;AAAA,MAEX,QAAQ;AAAA,MAER;AAGF,iBAAc,UAAUA,MAAK,QAAQ,QAAQ,CAAC,GAC9C,MAAc,cAAc,UAAU,UAAU,GACzC;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA,EAMQ,aAAa,MAAY,YAAoB,YAAoB,SAA+B;AACtG,WAAO,CAAC,kBAAkB,MAAM,YAAY,YAAY,OAAO;AAAA,EACjE;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAc,uBACZ,MACA,YACA,YACA,SAC4C;AAC5C,QAAM,WAAqB,CAAC;AAE5B,QAAI;AAIF,UAFqB,MAAc,OAAO,UAAU,KAEhC,KAAK,SAAS,KAAK,UAAU,WAAW;AAG1D,eAAO,KAAK,oDAAoD,KAAK,KAAK,qCAAqC;AAC/G,YAAM,gBAAgB,MAAM,KAAK,eAAe,YAAY,OAAO;AACnE,eAAO,KAAK,2BAA2B,MAAM,eAAe,YAAY,YAAY,OAAO;AAAA,MAC7F;AAGA,UAAI;AACJ,UAAI,CAAC,QAAQ,QAAQ;AACnB,YAAM,UAAU,MAAME,IAAG,SAAS,UAAU;AAI5C,YAHA,cAAc,MAAM,kBAAkB,QAAQ,SAAS,MAAM,CAAC,GAG1D,MAAc,OAAO,UAAU;AACjC,cAAI;AACF,gBAAM,WAAW,MAAMA,IAAG,SAAS,UAAU;AAC7C,gBAAI,OAAO,QAAQ,SAAS,QAAQ,MAAM;AAExC,qBAAO;AAAA,gBACL,QAAQ;AAAA,gBACR,QAAQ;AAAA,gBACR,SAAS;AAAA,gBACT,aAAa;AAAA,gBACb;AAAA,gBACA,mBAAmB;AAAA,gBACnB,UAAU,SAAS,SAAS,IAAI,WAAW;AAAA,cAC7C;AAAA,UAEJ,QAAQ;AAAA,UAER;AAGF,cAAc,UAAUF,MAAK,QAAQ,UAAU,CAAC,GAChD,MAAME,IAAG,UAAU,YAAY,OAAO;AAGtC,YAAI;AACF,cAAM,aAAa,MAAMA,IAAG,KAAK,UAAU;AAC3C,UAAI,WAAW,OAAO,MACpB,M
AAMA,IAAG,MAAM,YAAY,WAAW,IAAI;AAAA,QAE9C,QAAQ;AAAA,QAER;AAAA,MACF;AAEA,aAAO;AAAA,QACL,QAAQ;AAAA,QACR,QAAQ;AAAA,QACR,SAAS;AAAA,QACT,aAAa;AAAA,QACb;AAAA,QACA,mBAAmB;AAAA,QACnB,UAAU,SAAS,SAAS,IAAI,WAAW;AAAA,MAC7C;AAAA,IACF,SAAS,OAAO;AACd,aAAO;AAAA,QACL,QAAQ;AAAA,QACR,QAAQ;AAAA,QACR,SAAS;AAAA,QACT,aAAa;AAAA,QACb,OAAO,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC;AAAA,QAC/D,UAAU,SAAS,SAAS,IAAI,WAAW;AAAA,MAC7C;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,mBAAmB,SAAiB,QAAyB;AAE3D,QAAI,CAAC,QAAQ,KAAK;AAChB,cAAQ,QAAQ;AAAA,QACd,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK;AAAA,QACL,KAAK;AACH,iBAAO,CAAC;AAAA,QACV,KAAK;AAAA,QACL,KAAK;AACH,iBAAO,EAAE,MAAM,GAAG;AAAA,QACpB,KAAK;AAAA,QACL,KAAK;AAAA,QACL;AACE,iBAAO;AAAA,MACX;AAGF,QAAI;AACF,cAAQ,QAAQ;AAAA,QACd,KAAK;AAAA,QACL,KAAK;AAEH,cAAM,UAAU;AAChB,cAAI,WAAW,SAAS;AACtB,gBAAM,SAAS,WAAW,OAAO;AACjC,gBAAI,WAAW;AACb,oBAAM,IAAI,MAAM,iCAAiC;AAEnD,mBAAO;AAAA,UACT;AACA,iBAAO,KAAK,MAAM,OAAO;AAAA,QAE3B,KAAK;AAAA,QACL,KAAK;AACH,iBAAOC,MAAK,KAAK,OAAO;AAAA,QAE1B,KAAK;AACH,cAAI;AACF,mBAAY,YAAM,OAAO;AAAA,UAC3B,SAAS,OAAO;AACd,kBAAM,IAAI,MAAM,qBAAqB,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC,EAAE;AAAA,UAC/F;AAAA,QAEF,KAAK;AAAA,QACL,KAAK;AACH,iBAAO,KAAK,cAAc,OAAO;AAAA,QAEnC,KAAK;AAAA,QACL,KAAK;AAAA,QACL;AACE,iBAAO;AAAA,MACX;AAAA,IACF,SAAS,OAAO;AACd,YAAM,IAAI,MAAM,mBAAmB,MAAM,aAAa,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC,EAAE;AAAA,IAChH;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,uBAAuB,SAAc,QAA4B;AAC/D,QAAI;AACF,cAAQ,QAAQ;AAAA,QACd,KAAK;AAAA,QACL,KAAK;AACH,iBAAO,KAAK,UAAU,SAAS,MAAM,CAAC;AAAA,QAExC,KAAK;AAAA,QACL,KAAK;AACH,iBAAOA,MAAK,KAAK,SAAS,EAAE,QAAQ,GAAG,WAAW,IAAI,WAAW,GAAG,CAAC;AAAA,QAEvE,KAAK;AAGH,cAAI,OAAO,WAAY;AACrB,mBAAO;AAET,cAAI;AAEF,gBAAI,OAAY,gBAAU,OAAO;AAGjC,mBAAI,WAAW,OAAO,WAAY,YAAY,QAAQ,gBACpD,OAAO,KAAK,yBAAyB,IAAI,IAGpC;AAAA,UACT,SAAS,OAAO;AACd,kBAAM,IAAI,MAAM,yBAAyB,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC,EAAE;AAAA,UACnG;AAAA,QAEF,KAAK;AAAA,QACL,KAAK;AACH,iBAAO,KAAK,kBAAkB,OAAO;AAAA,QAEvC,KAAK;AAAA,QACL,KAAK;AAAA,QACL;AACE,iBAAO,OAAO,WAAY,WAAW,UAAU,KAAK
,UAAU,SAAS,MAAM,CAAC;AAAA,MAClF;AAAA,IACF,SAAS,OAAO;AACd,YAAM,IAAI,MAAM,uBAAuB,MAAM,aAAa,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC,EAAE;AAAA,IACpH;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,aAAa,UAAkB,SAA6B;AAClE,QAAM,MAAMH,MAAK,QAAQ,QAAQ,EAAE,YAAY,GAEzC,SAAqC;AAAA,MACzC,SAAS;AAAA,MACT,UAAU;AAAA,MACV,SAAS;AAAA,MACT,QAAQ;AAAA,MACR,SAAS;AAAA,MACT,OAAO;AAAA,MACP,QAAQ;AAAA,MACR,aAAa;AAAA,MACb,QAAQ;AAAA,IACV;AAEA,WAAI,OAAO,GAAG,IACL,OAAO,GAAG,IAIf,QAAQ,KAAK,EAAE,WAAW,GAAG,KAAK,QAAQ,KAAK,EAAE,WAAW,GAAG,IAC1D,QAAQ,SAAS,IAAI,KAAK,QAAQ,SAAS,IAAI,IAAI,UAAU,SAGlE,QAAQ,SAAS;AAAA,CAAO,KAAK,QAAQ,SAAS;AAAA;AAAA,CAAS,IAClD,aAGF;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,gBAAgB,MAAW,UAAuB;AACxD,QAAI;AACF,UAAM,SAAS,SAAS,EAAE,MAAM,UAAU,MAAM,KAAK,CAAC;AACtD,aAAO,OAAO,WAAW,IAAI,OAAO,CAAC,IAAI;AAAA,IAC3C,SAAS,OAAO;AACd,YAAM,IAAI,MAAM,+BAA+B,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC,EAAE;AAAA,IACzG;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,SAAS,MAAW,MAAqB;AAC/C,QAAI,OAAO,QAAS,YAAY,SAAS;AACvC,aAAO;AAGT,QAAM,SAAc,MAAM,QAAQ,IAAI,IAAI,CAAC,IAAI,CAAC;AAEhD,aAAW,OAAO;AAChB,MAAI,IAAI,SAAS,GAAG,IAElB,KAAK,eAAe,QAAQ,KAAK,KAAK,eAAe,MAAM,GAAG,CAAC,IACtD,OAAO,SAChB,OAAO,GAAG,IAAI,KAAK,GAAG;AAI1B,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,SAAS,MAAW,MAAqB;AAC/C,QAAI,OAAO,QAAS,YAAY,SAAS;AACvC,aAAO;AAGT,QAAM,SAAS,MAAM,QAAQ,IAAI,IAAI,CAAC,GAAG,IAAI,IAAI,EAAE,GAAG,KAAK;AAE3D,aAAW,OAAO;AAChB,MAAI,IAAI,SAAS,GAAG,IAElB,KAAK,kBAAkB,QAAQ,GAAG,IAElC,OAAO,OAAO,GAAG;AAIrB,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,oBAAoB,MAAW,YAAsB,SAAoC;AACrG,QAAI,SAAS;AAEb,aAAW,iBAAiB;AAC1B,UAAI;AAGF,YAAM,EAAE,MAAM,QAAQ,IAAI,KAAK,mBAAmB,aAAa;AAE/D,eAAO,MAAM,uBAAuB,IAAI,IAAI,OAAO,GAGnD,SAAS,KAAK,kBAAkB,QAAQ,MAAM,QAAQ,OAAO;AAAA,MAC/D,SAAS,OAAO;AACd,cAAM,IAAI,MAAM,cAAc,aAAa,aAAa,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC,EAAE;AAAA,MAClH;AAGF,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA,EAMQ,mBAAmB,MAA+C;AACxE,QAAM,QAAQ,KAAK,MAAM,0BAA0B;AACnD,QAAI,CAAC;AACH,YAAM,IAAI,MAAM,oCAAoC,IAAI,EAAE;AAG5D,QAAM,CAAC,EAAE,MAAM,UAAU,IAAI;AAE7B,QAAI,CAAC;AACH,aAAO,EAAE,KAAK;AAIhB,QAAM,UAAe,C
AAC,GAChB,QAAQ,WAAW,MAAM,GAAG,EAAE,IAAI,OAAK,EAAE,KAAK,CAAC;AAErD,aAAW,QAAQ,OAAO;AACxB,UAAM,CAAC,KAAK,KAAK,IAAI,KAAK,MAAM,GAAG,EAAE,IAAI,OAAK,EAAE,KAAK,CAAC;AAGtD,MAAI,MAAM,WAAW,GAAG,KAAK,MAAM,SAAS,GAAG,IAE7C,QAAQ,GAAG,IAAI,MAAM,MAAM,GAAG,EAAE,EAAE,MAAM,GAAG,EAAE,IAAI,OAAK,EAAE,KAAK,CAAC,IACrD,UAAU,UAAU,UAAU,UAEvC,QAAQ,GAAG,IAAI,UAAU,SACf,MAAM,OAAO,KAAK,CAAC,IAK7B,QAAQ,GAAG,IAAI,QAHf,QAAQ,GAAG,IAAI,OAAO,KAAK;AAAA,IAK/B;AAEA,WAAO,EAAE,MAAM,QAAQ;AAAA,EACzB;AAAA;AAAA;AAAA;AAAA,EAKQ,aAAa,MAAW,KAAkB;AAChD,WAAO,EAAE,CAAC,GAAG,GAAG,KAAK;AAAA,EACvB;AAAA;AAAA;AAAA;AAAA,EAKQ,aACN,QACA,QACA,UACA,SAC0C;AAC1C,QAAM,YAA4B,CAAC,GAE/B;AAEJ,YAAQ,UAAU;AAAA,MAChB,KAAK;AACH,iBAAS;AACT;AAAA,MAEF,KAAK;AACH,iBAAS,EAAE,GAAG,QAAQ,GAAG,OAAO;AAChC;AAAA,MAEF,KAAK;AACH,iBAAS,KAAK,UAAU,QAAQ,QAAQ,WAAW,OAAO;AAC1D;AAAA,MAEF,KAAK;AAGH,iBAAS;AACT;AAAA,MAEF;AACE,iBAAS;AAAA,IACb;AAEA,WAAO,EAAE,MAAM,QAAQ,UAAU;AAAA,EACnC;AAAA;AAAA;AAAA;AAAA,EAKQ,UAAU,QAAa,QAAa,WAA2B,SAAsB,UAAkB,IAAS;AAKtH,QAJI,OAAO,UAAW,YAAY,WAAW,QAIzC,OAAO,UAAW,YAAY,WAAW;AAC3C,aAAO;AAGT,QAAI,MAAM,QAAQ,MAAM,KAAK,MAAM,QAAQ,MAAM;AAE/C,aAAO,CAAC,GAAG,QAAQ,GAAG,MAAM;AAG9B,QAAM,SAAS,EAAE,GAAG,OAAO;AAE3B,aAAW,OAAO,OAAO,KAAK,MAAM,GAAG;AACrC,UAAM,cAAc,UAAU,GAAG,OAAO,IAAI,GAAG,KAAK;AAEpD,MAAM,OAAO,SAEF,OAAO,OAAO,GAAG,KAAM,YAAY,OAAO,OAAO,GAAG,KAAM,WACnE,OAAO,GAAG,IAAI,KAAK,UAAU,OAAO,GAAG,GAAG,OAAO,GAAG,GAAG,WAAW,SAAS,WAAW,IAC7E,OAAO,GAAG,MAAM,OAAO,GAAG,MAEnC,UAAU,KAAK;AAAA,QACb,MAAM;AAAA,QACN,QAAQ,QAAQ;AAAA,QAChB,QAAQ,CAAC,UAAU;AAAA,QACnB,MAAM;AAAA,QACN,YAAY;AAAA,MACd,CAAC,GACD,OAAO,GAAG,IAAI,OAAO,GAAG,KAZxB,OAAO,GAAG,IAAI,OAAO,GAAG;AAAA,IAc5B;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,kBAAkB,WAAgB,SAA+B;AACvE,QAAI,UAAU;AACZ,aAAO,UAAU,IAAI,MAAM,CAAC,MAAW,KAAK,kBAAkB,GAAG,OAAO,CAAC;AAG3E,QAAI,UAAU;AACZ,aAAO,UAAU,GAAG,KAAK,CAAC,MAAW,KAAK,kBAAkB,GAAG,OAAO,CAAC;AAGzE,QAAI,UAAU;AACZ,aAAO,CAAC,KAAK,kBAAkB,UAAU,KAAK,OAAO;AAGvD,QAAI,UAAU,KAAK;AACjB,UAAM,CAAC,MAAM,KAAK,IAAI,UAAU,KAC1B,UAAU,KAAK,aAAa,MAAM,OAAO,GACzC,WAAW,KAAK,aAAa,OAAO,OAAO;AACjD,aAAO,YAAY
,SAAS,QAAQ;AAAA,IACtC;AAEA,QAAI,UAAU,KAAK;AACjB,UAAM,CAAC,MAAM,KAAK,IAAI,UAAU,KAC1B,UAAU,KAAK,aAAa,MAAM,OAAO,GACzC,WAAW,KAAK,aAAa,OAAO,OAAO;AACjD,aAAO,eAAe,SAAS,QAAQ;AAAA,IACzC;AAEA,QAAI,UAAU,QAAQ;AACpB,UAAM,WAAWA,MAAK,KAAK,QAAQ,eAAe,UAAU,MAAM;AAElE,aAAO,OAAO,WAAW,QAAQ;AAAA,IACnC;AAEA,WAAI,UAAU,WACL,QAAQ,aAAa,UAAU,WAGjC;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,aAAa,OAAe,SAA2B;AAC7D,QAAI,MAAM,WAAW,IAAI,GAAG;AAC1B,UAAM,UAAU,MAAM,MAAM,CAAC;AAC7B,aAAO,QAAQ,UAAU,OAAO;AAAA,IAClC;AACA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAc,qBACZ,SACA,SACkD;AAClD,QAAI,mBAAmB,OAAO;AAC5B,YAAM,IAAI,MAAM,qEAAqE;AAGvF,QAAM,kBAAkB,SAClB,SAAS,MAAM,KAAK,eAAe,QAAQ,iBAAiB;AAAA,MAChE,SAAS,QAAQ;AAAA,MACjB,aAAa;AAAA,IACf,CAAC;AAED,WAAO;AAAA,MACL,OAAO,OAAO;AAAA,MACd,UAAU,OAAO;AAAA,IACnB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOQ,sBAAsB,YAAoB,aAAqB,WAAmB,SAA8B;AAEtH,QAAM,eAAeA,MAAK,SAAS,QAAQ,aAAa,UAAU;AAGlE,QAAI,UAAU,SAAS,GAAG,GAAG;AAE3B,UAAI,UAAU,SAAS,IAAI,GAAG;AAC5B,YAAM,YAAY;AAAA,UAChB;AAAA,UACA;AAAA,UACA;AAAA,QACF;AACA,eAAOA,MAAK,KAAK,QAAQ,eAAe,SAAS;AAAA,MACnD;AAGA,UAAM,iBAAiBA,MAAK,SAAS,UAAU,GACzC,YAAYA,MAAK,QAAQ,UAAU,GACnC,aAAaA,MAAK,SAAS,YAAY,SAAS,GAEhD,UAAU,UAAU,MAAM,GAAG,GAC7B,WAAW,QAAQ,CAAC,GACpB,WAAW,QAAQ,CAAC,KAAK,IAEzB,YAAY,SAAS,WAAW,GAAG,IAAI,WAAY,YAAY,UAC/D,iBAAiB,aAAa,WAG9B,yBAAyB,gCAAgC,cAAc,GAEvE,aAAa,WAAW;AAC9B,aAAOA,MAAK,KAAK,QAAQ,eAAe,UAAU;AAAA,IACpD;AAGA,WAAOA,MAAK,KAAK,QAAQ,eAAe,SAAS;AAAA,EACnD;AAAA;AAAA;AAAA;AAAA,EAKQ,YAAY,MAAsB;AACxC,QAAM,WAAqB,CAAC,MAAM;AAElC,WAAI,KAAK,QAAM,SAAS,KAAK,SAAS,GAClC,KAAK,QAAM,SAAS,KAAK,MAAM,GAC/B,KAAK,QAAM,SAAS,KAAK,MAAM,GAC/B,KAAK,OAAK,SAAS,KAAK,KAAK,GAC7B,KAAK,SAAO,SAAS,KAAK,OAAO,GACjC,KAAK,SAAO,SAAS,KAAK,SAAS,KAAK,KAAK,EAAE,GAEnD,SAAS,KAAK,OAAO,GAEd;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA,EAMQ,qBAAqB,SAAuD;AAClF,QAAI,mBAAmB,OAAO;AAC5B,aAAO;AAGT,QAAM,kBAAkB;AACxB,WAAO,MAAM,QAAQ,eAAe,IAAI,gBAAgB,KAAK,IAAI,IAAI;AAAA,EACvE;AAAA;AAAA;AAAA;AAAA;AAAA,EAMQ,mBAAmB,SAA+D;AACxF,WAAI,OAAO,WAAY,WACd,UAEL,mBAAmB,OAAO,IACrB,aAEF,OAAO,KAAK,OAA2B,EAAE,KAAK,IAAI;A
AAA,EAC3D;AAAA;AAAA;AAAA;AAAA;AAAA,EAMQ,gBAAgB,SAAuD;AAC7E,QAAI,mBAAmB,OAAO;AAC5B,YAAM,IAAI,MAAM,oFAAoF;AAGtG,QAAM,kBAAkB;AACxB,WAAO,MAAM,QAAQ,eAAe,IAAI,gBAAgB,CAAC,IAAI;AAAA,EAC/D;AAAA;AAAA;AAAA;AAAA,EAKQ,iBAAiB,SAAuB,WAA+B;AAC7E,QAAM,aAAa,QAAQ,OAAO,OAAK,EAAE,OAAO,GAC1C,SAAS,QAAQ,OAAO,OAAK,CAAC,EAAE,OAAO;AAE7C,WAAI,OAAO,SAAS,IACX;AAAA,MACL,QAAQ,QAAQ,CAAC,GAAG,UAAU;AAAA,MAC9B,QAAQ,QAAQ,IAAI,OAAK,EAAE,MAAM,EAAE,KAAK,IAAI;AAAA,MAC5C,SAAS;AAAA,MACT,aAAa,QAAQ,KAAK,OAAK,EAAE,WAAW;AAAA,MAC5C,OAAO,OAAO,CAAC,GAAG;AAAA,MAClB,UAAU,QAAQ,QAAQ,OAAK,EAAE,YAAY,CAAC,CAAC;AAAA,MAC/C,eAAe,KAAK,IAAI,IAAI;AAAA,IAC9B,IAGK;AAAA,MACL,QAAQ,QAAQ,CAAC,GAAG,UAAU;AAAA,MAC9B,QAAQ,QAAQ,QAAQ,OAAK,OAAO,EAAE,UAAW,WAAW,CAAC,EAAE,MAAM,IAAI,EAAE,MAAM;AAAA,MACjF,SAAS;AAAA,MACT,aAAa,QAAQ,KAAK,OAAK,EAAE,WAAW;AAAA,MAC5C,UAAU,QAAQ,QAAQ,OAAK,EAAE,YAAY,CAAC,CAAC;AAAA,MAC/C,WAAW,QAAQ,QAAQ,OAAK,EAAE,aAAa,CAAC,CAAC;AAAA,MACjD,eAAe,KAAK,IAAI,IAAI;AAAA,IAC9B;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,cAAc,SAAsB;AAC1C,WAAO,sBAAsB,OAAO;AAAA,EACtC;AAAA;AAAA;AAAA;AAAA,EAKQ,kBAAkB,SAAsB;AAC9C,WAAO,0BAA0B,OAAO;AAAA,EAC1C;AAAA;AAAA;AAAA;AAAA,EAKQ,kBAAkB,SAAyB;AAEjD,QAAI,SAAS,QAAQ,QAAQ,aAAa,EAAE;AAE5C,oBAAS,OAAO,QAAQ,qBAAqB,EAAE,GACxC;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,eAAe,KAAUA,OAAmB;AAClD,WAAOI,gBAAe,KAAKJ,KAAI;AAAA,EACjC;AAAA;AAAA;AAAA;AAAA,EAKQ,eAAe,KAAUA,OAAc,OAAkB;AAC/D,IAAAK,gBAAe,KAAKL,OAAM,KAAK;AAAA,EACjC;AAAA;AAAA;AAAA;AAAA,EAKQ,kBAAkB,KAAUA,OAAoB;AACtD,IAAAM,mBAAkB,KAAKN,KAAI;AAAA,EAC7B;AAAA;AAAA;AAAA;AAAA;AAAA,EAMQ,yBAAyB,MAAsB;AACrD,QAAM,aAAa,CAAC,gBAAgB,kBAAkB,GAClD,SAAS;AAEb,aAAW,OAAO,YAAY;AAE5B,UAAM,UAAU,IAAI;AAAA,QAClB,6CAA6C,GAAG;AAAA,QAChD;AAAA,MACF;AAEA,eAAS,OAAO,QAAQ,SAAS,CAAC,OAAO,YAAY,YAAY;AAC/D,YAAM,QAAkB,CAAC,GACnB,QAAQ,QAAQ,KAAK,EAAE,MAAM;AAAA,CAAI;AAEvC,iBAAW,QAAQ,OAAO;AACxB,cAAM,UAAU,KAAK,KAAK;AAC1B,cAAI,CAAC,WAAW,QAAQ,WAAW,GAAG,EAAG;AAEzC,cAAM,UAAU,QAAQ,MAAM,uBAAuB;AACrD,cAAI,SAAS;AACX,gBAAM,CAAC,EAAE,GAAG,CAAC,IAAI;AACjB,kBAAM,KAAK,IAAI,CAAC,OAAO,CAAC,EAAE;AAAA,UAC5B;AAAA,QACF;AAEA,YAAI,MAAM,WAAW,E
AAG,QAAO;AAE/B,YAAM,cAAc,KAAK,MAAM,KAAK,IAAI,CAAC;AACzC,eAAO,GAAG,GAAG,MAAM,WAAW;AAAA,MAChC,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,EACT;AACF;AAKO,SAAS,qBAAmC;AACjD,SAAO,IAAI,oBAAoB;AACjC;;;AOp5CO,SAAS,mBAAmB,OAA8B;AAC/D,MAAM,aAAa,MAAM,QAAQ,GAAG;AACpC,MAAI,eAAe;AACjB,WAAO,EAAE,KAAK,OAAO,MAAM,MAAM;AAGnC,MAAM,SAAS,MAAM,MAAM,GAAG,UAAU,GAClC,OAAO,MAAM,MAAM,aAAa,CAAC;AAEvC,MAAI,CAAC;AACH,WAAO,EAAE,KAAK,OAAO,MAAM,MAAM;AAGnC,MAAM,aAAa,cAAc,MAAM;AAEvC,SAAI,eAAe,UACV,EAAE,KAAK,OAAO,MAAM,MAAM,IAG5B,EAAE,KAAK,OAAO,MAAM,WAAW;AACxC;;;ACzCA,SAAS,YAAAO,YAAU,QAAAC,cAAY;AAC/B,SAAS,WAAAC,gBAAe;AACxB,OAAO,QAAQ;AACf,SAAS,aAAAC,kBAAiB;AAC1B,SAAS,UAAAC,eAAc;AAyDvB,eAAsB,mBACpB,eACA,gBAC8B;AAC9B,SAAO,MAAM,iCAAiC,EAAE,eAAe,eAAe,CAAC;AAG/E,MAAI,YAAY,MAAM,qBAAqB,aAAa,GAGlD,mBAAmB,mBAAmB,cAAc;AAQ1D,MAPI,oBAAoB,iBAAiB,SAAS,KAChD,YAAY,UAAU,OAAO,OAAK,iBAAiB,SAAS,EAAE,YAAY,CAAC,CAAC,GAC5E,OAAO,MAAM,0BAA0B,UAAU,KAAK,IAAI,KAAK,MAAM,EAAE,KAEvE,OAAO,MAAM,uBAAuB,UAAU,KAAK,IAAI,KAAK,MAAM,EAAE,GAGlE,UAAU,WAAW;AACvB,kBAAO,MAAM,oCAAoC,GAC1CC,mBAAkB;AAI3B,MAAM,WAAW,6BAA6B,WAAW,aAAa;AACtE,SAAO,MAAM,aAAa,SAAS,MAAM,kBAAkB,UAAU,MAAM,YAAY;AAGvF,MAAM,kBAAkB,MAAM,0BAA0B,UAAU,aAAa;AAC/E,SAAO,MAAM,cAAc,gBAAgB,IAAI,6BAA6B;AAG5E,MAAM,eAAe,MAAM,qBAAqB,aAAa;AAC7D,SAAO,MAAM,UAAU,aAAa,IAAI,2BAA2B;AAGnE,MAAM,iBAAiB,qBAAqB,iBAAiB,YAAY;AACzE,gBAAO,MAAM,eAAe,eAAe,MAAM,kBAAkB,GAG5D,oBAAoB,cAAc;AAC3C;AAMA,SAAS,6BACP,WACA,eACe;AACf,MAAM,WAA0B,CAAC;AAEjC,WAAW,YAAY,WAAW;AAChC,QAAM,aAAa,sBAAsB,UAAU,aAAa;AAGhE,aAAW,QAAQ,WAAW,QAAQ;AACpC,UAAI,KAAK,SAAU;AACnB,UAAM,iBAAiB,kBAAkB,MAAM,aAAa;AAE5D,eAAW,WAAW,gBAAgB;AAEpC,YAAM,WAAW,wBAAwB,IAAI;AAC7C,iBAAS,KAAK;AAAA,UACZ;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QACF,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AAOA,SAAS,wBAAwB,MAAoB;AACnD,MAAM,YAAY,KAAK,MAGnB;AACJ,MAAI,OAAO,aAAc;AACvB,kBAAc;AAAA,WACL,MAAM,QAAQ,SAAS;AAEhC,kBAAc,UAAU,CAAC,KAAK;AAAA,WACrB,OAAO,aAAc,YAAY,aAAa,WAAW;AAGlE,QAAM,aAAa;AAGnB,QAAI,WAAW,QAAQ;AACrB,UAAI,OAAO,WAAW,QAAQ,WAAY;AACxC,sBAAc,WAAW,QAAQ;AAAA,eACxB,OAAO,WAAW,QAAQ,WAAY,YAAY,a
AAa,WAAW,QAAQ;AAC3F,sBAAc,WAAW,QAAQ,QAAQ;AAAA;AAEzC,eAAO;AAAA,aAEA,WAAW,QAAQ,SAAS,WAAW,QAAQ,MAAM,SAAS,GAAG;AAE1E,UAAM,YAAY,WAAW,QAAQ,MAAM,CAAC,EAAE;AAC9C,UAAI,OAAO,aAAc;AACvB,sBAAc;AAAA,eACL,OAAO,aAAc,YAAY,aAAa;AACvD,sBAAc,UAAU;AAAA;AAExB,eAAO;AAAA,IAEX;AACE,aAAO;AAAA,EAEX,WAAW,OAAO,aAAc,YAAY,aAAa;AAGvD,kBADmB,UACM;AAAA;AAEzB,WAAO;AAWT,MAAM,YADQ,YAAY,MAAM,GAAG,EACX,CAAC;AAGzB,SAAI,UAAU,SAAS,GAAG,IAEpB,cAAc,eAAe,cAAc,aACtC,SAEL,cAAc,eAGd,cAAc,eAAe,cAAc,aAAa,cAAc,YACjE,WAGQ,UAAU,QAAQ,YAAY,EAAE,EAAE,YAAY,KAC5C,UAId,aAAa;AACtB;AAMA,SAAS,kBAAkB,MAAY,eAAiC;AACtE,MAAM,UAAU,KAAK;AAGrB,MAAI,OAAO,WAAY,YAAY,aAAa,SAAS;AACvD,QAAM,aAAa,SACb,WAAqB,CAAC,GAItB,WADiB,cAAc,QAAQ,QAAQ,EAAE,MACnBC,SAAQ;AAG5C,QAAI,WAAW,QAAQ;AACrB,eAAW,YAAY,WAAW,QAAQ;AAExC,YAAI,SAAS,YAAY,QAAQ,UAAU;AAEzC,cAAI,OAAO,SAAS,SAAU;AAC5B,mBAAO,CAAC,SAAS,KAAK;AACjB,cAAI,OAAO,SAAS,SAAU,YAAY,aAAa,SAAS;AACrE,mBAAO,CAAC,SAAS,MAAM,OAAO;AAAA,QAElC;AAAA;AAKJ,QAAI,WAAW,QAAQ,SAAS;AAC9B,UAAI,OAAO,WAAW,QAAQ,WAAY;AACxC,eAAO,CAAC,WAAW,QAAQ,OAAO;AAC7B,UAAI,OAAO,WAAW,QAAQ,WAAY,YAAY,aAAa,WAAW,QAAQ;AAC3F,eAAO,CAAC,WAAW,QAAQ,QAAQ,OAAO;AAAA,IAE9C;AAGA,QAAI,WAAW,QAAQ;AACrB,eAAW,YAAY,WAAW,QAAQ;AACxC,QAAI,OAAO,SAAS,SAAU,WAC5B,SAAS,KAAK,SAAS,KAAK,IACnB,OAAO,SAAS,SAAU,YAAY,aAAa,SAAS,SACrE,SAAS,KAAK,SAAS,MAAM,OAAO;AAK1C,WAAO;AAAA,EACT;AAGA,SAAI,OAAO,WAAY,YAAY,aAAa,WAAW,OAAO,QAAQ,WAAY,WAC7E,CAAC,QAAQ,OAAO,IAIrB,OAAO,WAAY,WACd,CAAC,OAAO,IAIb,MAAM,QAAQ,OAAO,IAChB,QAAQ,OAAO,CAAC,MAAmB,OAAO,KAAM,QAAQ,IAG1D,CAAC;AACV;AAmCO,SAAS,sBAAsB,SAA6D;AACjG,MAAM,aAAa,QAAQ,QAAQ,OAAO,GAAG;AAE7C,MAAI,CAAC,WAAW,SAAS,GAAG;AAC1B,WAAO,EAAE,MAAM,MAAM,UAAU,GAAK;AAGtC,MAAM,WAAW,WAAW,MAAM,GAAG,GAC/B,cAAc,CAAC,QAAgB,kBAAkB,KAAK,GAAG,GACzD,iBAA2B,CAAC;AAElC,WAAW,OAAO;AAChB,QAAK,KACL;AAAA,UAAI,YAAY,GAAG,EAAG;AACtB,qBAAe,KAAK,GAAG;AAAA;AAGzB,SAAI,eAAe,WAAW,IACrB,EAAE,MAAM,MAAM,UAAU,GAAM,IAGhC,EAAE,MAAM,eAAe,KAAK,GAAG,GAAG,UAAU,GAAM;AAC3D;AAEA,IAAM,eAAe,CAAC,sBAAsB,sBAAsB,YAAY;AAY9E,eAAe,0BACb,UACA,eACqC;AACrC,MAAM,WAAW,oBAAI,IAA2B,GAE1C,mBAAkC,CAAC,GACnC,iBAAgC,CAAC,GACjC
,eAAe,oBAAI,IAA2B;AAEpD,WAAW,eAAe,UAAU;AAClC,QAAM,EAAE,MAAM,SAAS,IAAI,sBAAsB,YAAY,OAAO;AAEpE,IAAI,WACF,iBAAiB,KAAK,WAAW,IACxB,SAAS,OAClB,eAAe,KAAK,WAAW,KAE1B,aAAa,IAAI,IAAI,KACxB,aAAa,IAAI,MAAM,CAAC,CAAC,GAE3B,aAAa,IAAI,IAAI,EAAG,KAAK,WAAW;AAAA,EAE5C;AASA,MAPI,eAAe,SAAS,KAC1B,OAAO;AAAA,IACL,YAAY,eAAe,MAAM,gDACjC,eAAe,IAAI,OAAK,EAAE,OAAO,EAAE,KAAK,IAAI;AAAA,EAC9C,GAGE,iBAAiB,SAAS;AAC5B,QAAI;AACF,UAAM,iBAAiB,iBAAiB,IAAI,OAAK,EAAE,OAAO,GACpD,UAAU,MAAM,GAAG,gBAAgB;AAAA,QACvC,KAAK;AAAA,QACL,KAAK;AAAA,QACL,WAAW;AAAA,QACX,MAAM;AAAA,QACN,QAAQ;AAAA,MACV,CAAC;AAED,eAAW,gBAAgB;AACzB,sBAAc,UAAU,cAAc,kBAAkB,aAAa;AAAA,IAEzE,SAAS,OAAO;AACd,aAAO,MAAM,qCAAqC,EAAE,MAAM,CAAC;AAAA,IAC7D;AAGF,WAAW,CAAC,MAAM,aAAa,KAAK;AAClC,QAAI;AACF,UAAM,cAAc,cAAc,IAAI,OAChC,QAAQ,EAAE,QAAQ,WAAW,OAAO,GAAG,IAClC,EAAE,QAAQ,MAAM,KAAK,SAAS,CAAC,IAEjC,EAAE,OACV,GAEK,UAAU,MAAM,GAAG,aAAa;AAAA,QACpC,KAAK,OAAOC,OAAK,eAAe,IAAI,IAAI;AAAA,QACxC,KAAK;AAAA,QACL,WAAW;AAAA,QACX,QAAQ;AAAA,MACV,CAAC;AAED,eAAW,mBAAmB,SAAS;AACrC,YAAM,eAAe,OAAO,GAAG,IAAI,IAAI,eAAe,KAAK;AAC3D,sBAAc,UAAU,cAAc,eAAe,aAAa;AAAA,MACpE;AAAA,IACF,SAAS,OAAO;AACd,aAAO,MAAM,gCAAgC,IAAI,KAAK,EAAE,MAAM,CAAC;AAAA,IACjE;AAGF,SAAO;AACT;AAKA,SAAS,cACP,UACA,cACA,UACA,eACM;AACN,MAAM,WAAWC,WAAS,YAAY;AACtC,MAAIC,QAAO,QAAQ,EAAG;AAEtB,MAAM,eAAeF,OAAK,eAAe,YAAY,GAC/C,iBAAiB,2BAA2B,YAAY;AAE9D,MAAI,SAAS,IAAI,cAAc,EAAG;AAElC,MAAM,kBAAkB,SAAS;AAAA,IAAK,OACpCG,WAAU,cAAc,EAAE,SAAS,EAAE,KAAK,GAAK,CAAC;AAAA,EAClD,KAAK,SAAS,CAAC;AAEf,WAAS,IAAI,gBAAgB;AAAA,IAC3B,cAAc;AAAA,IACd,eAAe,2BAA2B,YAAY;AAAA,IACtD,UAAU,gBAAgB;AAAA,IAC1B,aAAa,gBAAgB;AAAA,IAC7B,UAAU,gBAAgB;AAAA,EAC5B,CAAC;AACH;AAMA,eAAe,qBAAqB,eAA6C;AAC/E,MAAM,eAAe,oBAAI,IAAY;AAErC,MAAI;AACF,QAAM,EAAE,MAAM,IAAI,MAAM,mBAAmB,aAAa;AAGxD,aAAW,CAAC,aAAa,WAAW,KAAK,OAAO,QAAQ,MAAM,QAAQ,GAAG;AACvE,UAAM,eAAe,YAAY,SAAS,CAAC;AAE3C,eAAW,CAAC,WAAW,OAAO,KAAK,OAAO,QAAQ,YAAY;AAC5D,YAAK,MAAM,QAAQ,OAAO;AAE1B,mBAAW,UAAU,SAAS;AAE5B,gBAAM,aAAa,OAAO,UAAW,WAAW,SAAS,OAAO,QAG1D,WAAW,oBAAoB,YAAY,aAAa,GACxD,aAAa,2BAA2B,SAAS,QAAQ;AAC/D,yBAAa,IAAI,UA
AU;AAAA,UAC7B;AAAA,IAEJ;AAAA,EACF,SAAS,OAAO;AACd,WAAO,MAAM,kCAAkC,EAAE,MAAM,CAAC;AAAA,EAC1D;AAEA,SAAO;AACT;AAKA,SAAS,qBACP,iBACA,cACiB;AACjB,MAAM,YAA6B,CAAC;AAEpC,WAAW,CAAC,cAAc,QAAQ,KAAK,iBAAiB;AACtD,QAAM,aAAa,2BAA2B,YAAY;AAE1D,IAAK,aAAa,IAAI,UAAU,KAC9B,UAAU,KAAK,QAAQ;AAAA,EAE3B;AAGA,SAAO,UAAU,KAAK,CAAC,GAAG,MAAM,EAAE,cAAc,cAAc,EAAE,aAAa,CAAC;AAChF;AAKA,SAAS,oBAAoB,OAA6C;AACxE,MAAM,iBAAiB,oBAAI,IAA+B,GACpD,iBAAiB,oBAAI,IAA6B;AAExD,WAAW,QAAQ;AAEjB,IAAK,eAAe,IAAI,KAAK,QAAQ,KACnC,eAAe,IAAI,KAAK,UAAU,CAAC,CAAC,GAEtC,eAAe,IAAI,KAAK,QAAQ,EAAG,KAAK,IAAI,GAGvC,eAAe,IAAI,KAAK,QAAQ,KACnC,eAAe,IAAI,KAAK,UAAU,CAAC,CAAC,GAEtC,eAAe,IAAI,KAAK,QAAQ,EAAG,KAAK,IAAI;AAG9C,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA,YAAY,MAAM;AAAA,EACpB;AACF;AAKA,SAASC,qBAAyC;AAChD,SAAO;AAAA,IACL,OAAO,CAAC;AAAA,IACR,gBAAgB,oBAAI,IAAI;AAAA,IACxB,gBAAgB,oBAAI,IAAI;AAAA,IACxB,YAAY;AAAA,EACd;AACF;;;AC5iBO,SAAS,qBAAqBC,OAAc,aAAoC;AAErF,MAAM,QADaA,MAAK,QAAQ,OAAO,GAAG,EAAE,QAAQ,OAAO,EAAE,EACpC,MAAM,GAAG,GAE5B,MAAM,MAAM,QAAQ,WAAW;AACrC,MAAI,MAAM,EAAG,QAAO;AAEpB,MAAM,YAAY,MAAM,MAAM,MAAM,CAAC;AACrC,SAAO,UAAU,SAAS,IAAI,UAAU,KAAK,GAAG,IAAI;AACtD;AAUA,SAAS,gBAAgB,mBAA2B,cAAsC;AACxF,MAAI,CAAC,qBAAqB,sBAAsB,GAAI,QAAO;AAE3D,MAAM,QAAQ,kBAAkB,MAAM,GAAG,GAEnC,SAAS,kBAAkB,YAAY;AAC7C,MAAI;AACF,WAAO,sBAAsB,OAAO,MAAM;AAM5C,MAAM,gBADe,gCAAgC,iBAAiB,EACnC,MAAM,GAAG,GACtC,cAAc,cAAc,cAAc,SAAS,CAAC,KAAK,IACzD,iBAAiB,eAAe,WAAW;AAEjD,MAAI,cAAc,WAAW;AAC3B,WAAO,kBAAkB;AAG3B,MAAM,UAAU,cAAc,MAAM,GAAG,EAAE,EAAE,KAAK,GAAG;AACnD,SAAO,UAAU,GAAG,OAAO,IAAI,cAAc,KAAK;AACpD;AASO,SAAS,uBAAuBA,OAAc,cAAsC;AACzF,MAAM,iBAAiB;AAEvB,MAAI,mBAAmB;AACrB,WAAO;AAGT,MAAI,mBAAmB;AACrB,WAAO;AAIT,MAAM,cADM,mBAAmB,cAAc,EACrB;AAExB,MAAI,CAAC;AACH,WAAO,SAAS,gBAAgBA,OAAM,OAAO,CAAC;AAGhD,MAAM,YAAY,qBAAqBA,OAAM,WAAW;AACxD,MAAI,cAAc,MAAM;AACtB,QAAMC,aAAY,YAAY,cAAc,GACtC,WAAWD,MAAK,QAAQ,OAAO,GAAG,EAAE,MAAM,GAAG,EAAE,IAAI,KAAK;AAC9D,WAAO,GAAGC,UAAS,IAAI,eAAe,QAAQ,CAAC;AAAA,EACjD;AAEA,MAAM,YAAY,gBAAgB,WAAW,cAAc;AAE3D,SAAO,GADW,YAAY,cAAc,CACzB,IAAI,SAAS;AAClC;AAUA,SAA
S,sBAAsB,OAAiB,QAAwB;AACtE,MAAM,MAAM,gBAAgB,OAAO,MAAM;AACzC,SAAI,MAAM,IACD,MAAM,MAAM,GAAG,GAAG,EAAE,KAAK,GAAG,IAE9B,MAAM,CAAC,KAAK;AACrB;AAQO,SAAS,sBAAsB,OAAiB,cAAwC;AAC7F,MAAM,SAAS,kBAAkB,YAAY;AAC7C,MAAI,CAAC,OAAQ,QAAO,CAAC;AAGrB,MAAM,cADM,mBAAmB,YAAY,EACnB;AACxB,MAAI,CAAC,YAAa,QAAO,CAAC;AAE1B,MAAM,aAAa,oBAAI,IAAY;AACnC,WAAW,KAAK,OAAO;AACrB,QAAM,YAAY,qBAAqB,GAAG,WAAW;AACrD,QAAI,cAAc,KAAM;AACxB,QAAM,QAAQ,UAAU,MAAM,GAAG,GAC3B,MAAM,gBAAgB,OAAO,MAAM;AACzC,IAAI,MAAM,KACR,WAAW,IAAI,MAAM,MAAM,GAAG,GAAG,EAAE,KAAK,GAAG,CAAC;AAAA,EAEhD;AAEA,SAAO,MAAM,KAAK,UAAU,EAAE,KAAK,CAAC,GAAG,MAAM,EAAE,SAAS,EAAE,MAAM;AAClE;AASO,SAAS,qBAAqBD,OAAc,cAA8B,YAA8B;AAE7G,MAAM,cADM,mBAAmB,YAAY,EACnB,SAClB,YAAY,YAAY,YAAY;AAE1C,MAAI,CAAC,aAAa;AAChB,QAAM,WAAWA,MAAK,QAAQ,OAAO,GAAG,EAAE,MAAM,GAAG,EAAE,IAAI,KAAK;AAC9D,WAAO,GAAG,SAAS,IAAI,eAAe,QAAQ,CAAC;AAAA,EACjD;AAEA,MAAM,YAAY,qBAAqBA,OAAM,WAAW;AACxD,MAAI,cAAc,MAAM;AACtB,QAAM,WAAWA,MAAK,QAAQ,OAAO,GAAG,EAAE,MAAM,GAAG,EAAE,IAAI,KAAK;AAC9D,WAAO,GAAG,SAAS,IAAI,eAAe,QAAQ,CAAC;AAAA,EACjD;AAGA,WAAW,YAAY;AACrB,QAAI,cAAc,YAAY,UAAU,WAAW,WAAW,GAAG;AAC/D,aAAO,GAAG,SAAS,IAAI,QAAQ;AAKnC,MAAM,SAAS,kBAAkB,YAAY,GACvC,QAAQ,UAAU,MAAM,GAAG;AACjC,SAAO,GAAG,SAAS,IAAI,SAAS,sBAAsB,OAAO,MAAM,IAAI,MAAM,CAAC,KAAK,SAAS;AAC9F;;;ACjJA,SAAS,wBAAwB,WAAmC;AAElE,MAAM,QADa,UAAU,QAAQ,OAAO,GAAG,EAAE,QAAQ,OAAO,EAAE,EACzC,MAAM,GAAG;AAElC,SAAI,MAAM,WAAW,MAAM,cAAc,cAAc,cAAc,eAC5D,QAGF,YAAY,MAAM,CAAC,CAAC,KAAK;AAClC;AASA,SAAS,oBAAoB,UAAkB,cAAsC;AAEnF,MAAM,SADY,YAAY,YAAY,IACf;AAC3B,SAAI,SAAS,WAAW,MAAM,IACrB,SAAS,MAAM,OAAO,MAAM,IAG9B;AACT;AAWO,SAAS,uBAAuB,YAAuD;AAC5F,MAAM,SAAS,oBAAI,IAAgC;AACnD,MAAI,WAAW,WAAW,EAAG,QAAO;AAGpC,MAAM,UAAU,oBAAI,IAA8B,GAC5C,WAAW,oBAAI,IAA4B;AAEjD,WAAW,OAAO,YAAY;AAC5B,QAAM,eAAe,wBAAwB,GAAG;AAChD,aAAS,IAAI,KAAK,YAAY,GACzB,QAAQ,IAAI,YAAY,KAC3B,QAAQ,IAAI,cAAc,CAAC,CAAC,GAE9B,QAAQ,IAAI,YAAY,EAAG,KAAK,GAAG;AAAA,EACrC;AAGA,MAAM,gBAAgB,oBAAI,IAA8B;AACxD,WAAW,CAAC,cAAc,IAAI,KAAK;AACjC,IAAI,kBAAkB,YAAY,KAChC,cAAc,IAAI,cAAc,sBAAsB,MAAM,YAAY,CAAC;AAK7E,WAAW,OAAO,YAAY;AAC5B,QAAM
,eAAe,SAAS,IAAI,GAAG,GAC/B,aAAa,cAAc,IAAI,YAAY,GAC3C,WAAW,cAAc,WAAW,SAAS,IAC/C,qBAAqB,KAAK,cAAc,UAAU,IAClD,uBAAuB,KAAK,YAAY,GACtC,eAAe,oBAAoB,UAAU,YAAY;AAE/D,WAAO,IAAI,KAAK,EAAE,cAAc,cAAc,SAAS,CAAC;AAAA,EAC1D;AAEA,SAAO;AACT;AASO,SAAS,uBACd,OACiC;AACjC,MAAM,SAAS,oBAAI,IAAgC;AACnD,MAAI,MAAM,WAAW,EAAG,QAAO;AAG/B,MAAM,UAAU,oBAAI,IAA8B,GAC5C,YAAY,oBAAI,IAA4B;AAElD,WAAW,QAAQ;AACjB,cAAU,IAAI,KAAK,MAAM,KAAK,YAAY,GACrC,QAAQ,IAAI,KAAK,YAAY,KAChC,QAAQ,IAAI,KAAK,cAAc,CAAC,CAAC,GAEnC,QAAQ,IAAI,KAAK,YAAY,EAAG,KAAK,KAAK,IAAI;AAIhD,MAAM,gBAAgB,oBAAI,IAA8B;AACxD,WAAW,CAAC,cAAc,KAAK,KAAK;AAClC,IAAI,kBAAkB,YAAY,KAChC,cAAc,IAAI,cAAc,sBAAsB,OAAO,YAAY,CAAC;AAK9E,WAAW,QAAQ,OAAO;AACxB,QAAM,SAAS,kBAAkB,KAAK,YAAY;AAKlD,QAAI,QAAQ;AAEV,UAAM,cADM,mBAAmB,KAAK,YAAY,EACxB;AACxB,UAAI,aAAa;AACf,YAAM,YAAY,qBAAqB,KAAK,MAAM,WAAW;AAC7D,YAAI,cAAc,MAAM;AAEtB,cAAME,iBADe,UAAU,MAAM,GAAG,EAAE,IAAI,KAAK,QACb,QAEhC,kBADa,cAAc,IAAI,KAAK,YAAY,KAAK,CAAC,GAC1B;AAAA,YAChC,OAAK,cAAc,KAAK,UAAU,WAAW,IAAI,GAAG;AAAA,UACtD;AACA,cAAI,CAACA,iBAAgB,CAAC;AACpB;AAAA,QAEJ;AAAA,MACF;AAAA,IACF;AAEA,QAAM,aAAa,cAAc,IAAI,KAAK,YAAY,GAChD,WAAW,cAAc,WAAW,SAAS,IAC/C,qBAAqB,KAAK,MAAM,KAAK,cAAc,UAAU,IAC7D,uBAAuB,KAAK,MAAM,KAAK,YAAY,GACjD,eAAe,oBAAoB,UAAU,KAAK,YAAY;AAEpE,WAAO,IAAI,KAAK,MAAM,EAAE,cAAc,KAAK,cAAc,cAAc,SAAS,CAAC;AAAA,EACnF;AAEA,SAAO;AACT;AAaO,SAAS,+BACd,OACuC;AACvC,MAAI,MAAM,WAAW,EAAG,QAAO,oBAAI,IAAI;AAEvC,MAAM,aAAa;AAAA,IACjB,MAAM,IAAI,QAAM;AAAA,MACd,MAAM,EAAE;AAAA,MACR,cAAc,cAAc,EAAE,QAAQ;AAAA,IACxC,EAAE;AAAA,EACJ,GAEM,UAAU,oBAAI,IAAsC;AAE1D,WAAW,QAAQ,OAAO;AACxB,QAAM,MAAM,WAAW,IAAI,KAAK,aAAa;AAC7C,QAAI,CAAC,IAAK;AAEV,QAAM,MAAM,GAAG,IAAI,YAAY,KAAK,IAAI,YAAY;AAEpD,IAAK,QAAQ,IAAI,GAAG,KAClB,QAAQ,IAAI,KAAK;AAAA,MACf,cAAc,IAAI;AAAA,MAClB,cAAc,IAAI;AAAA,MAClB,UAAU,IAAI;AAAA,MACd,WAAW,CAAC;AAAA,IACd,CAAC,GAEH,QAAQ,IAAI,GAAG,EAAG,UAAU,KAAK,KAAK,aAAa;AAAA,EACrD;AAEA,SAAO;AACT;;;AC3KA,eAAsB,wBACpB,WACA,OAC6B;AAC7B,MAAM,EAAE,MAAM,IAAI,MAAM,mBAAmB,SAAS,GAC9C,WAAW,MAAM,YAAY,CAAC,GAE9B,YAAgC,CAAC,GACjC,mBAAsC,CAAC;AAI7C,WAAW,CAAC,SAAS,QA
AQ,KAAK,OAAO,QAAQ,QAAQ,GAAG;AAC1D,QAAM,eAAe,SAAS,SAAS,CAAC,GAClC,cAAc,oBAAI,IAAgE,GAClF,iBAA2B,CAAC,GAE5B,aAAa,uBAAuB,OAAO,KAAK,YAAY,CAAC;AAEnE,aAAW,CAAC,WAAW,QAAQ,KAAK,OAAO,QAAQ,YAAY,GAAG;AAChE,UAAM,MAAM,WAAW,IAAI,SAAS,GAC9B,EAAE,cAAc,aAAa,IAAI,KAEjC,MAAM,iBAAiB,UACzB,iBACA,GAAG,YAAY,KAAK,YAAY;AAEpC,MAAK,YAAY,IAAI,GAAG,KACtB,YAAY,IAAI,KAAK,EAAE,YAAY,oBAAI,IAAI,GAAG,aAAa,CAAC,EAAE,CAAC;AAEjE,UAAM,QAAQ,YAAY,IAAI,GAAG;AACjC,YAAM,WAAW,IAAI,SAAS;AAE9B,eAAW,WAAW,UAAU;AAC9B,YAAM,SAAS,cAAc,OAAO;AACpC,cAAM,YAAY,KAAK,MAAM,GAC7B,eAAe,KAAK,MAAM;AAAA,MAC5B;AAAA,IACF;AAGA,aAAW,CAAC,KAAK,KAAK,KAAK,aAAa;AACtC,UAAM,CAAC,cAAc,YAAY,IAAI,IAAI,MAAM,IAAI;AACnD,gBAAU,KAAK;AAAA,QACb,MAAM;AAAA,QACN;AAAA,QACA;AAAA,QACA,aAAa;AAAA,QACb,YAAY,MAAM;AAAA,QAClB,aAAa,MAAM;AAAA,QACnB;AAAA,MACF,CAAC;AAAA,IACH;AAGA,qBAAiB,KAAK;AAAA,MACpB,aAAa;AAAA,MACb,SAAS,SAAS;AAAA,MAClB,eAAe,YAAY;AAAA,MAC3B,aAAa;AAAA,MACb;AAAA,IACF,CAAC;AAAA,EACH;AAGA,MAAI;AACF,QAAM,kBAAkB,MAAM,mBAAmB,SAAS,GACpD,UAAU,+BAA+B,gBAAgB,KAAK;AAEpE,aAAW,CAAC,EAAE,KAAK,KAAK;AACtB,gBAAU,KAAK;AAAA,QACb,MAAM;AAAA,QACN,cAAc,MAAM;AAAA,QACpB,cAAc,MAAM;AAAA,QACpB,YAAY,oBAAI,IAAI;AAAA,QACpB,aAAa,MAAM;AAAA,QACnB;AAAA,MACF,CAAC;AAAA,EAEL,SAAS,OAAO;AACd,WAAO,KAAK,kEAAkE;AAAA,MAC5E,QAAQ,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,IAC/D,CAAC;AAAA,EACH;AAGA,MAAM,eAAe,IAAI,IAAI,oBAAoB,IAAI,CAAC,GAAG,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC;AACtE,mBAAU,KAAK,CAAC,GAAG,MAAM;AACvB,QAAM,SAAS,aAAa,IAAI,EAAE,YAAmB,KAAK,OACpD,SAAS,aAAa,IAAI,EAAE,YAAmB,KAAK;AAC1D,WAAI,WAAW,SAAe,SAAS,SAChC,EAAE,aAAa,cAAc,EAAE,YAAY;AAAA,EACpD,CAAC,GAGD,iBAAiB,KAAK,CAAC,GAAG,MAAM,EAAE,YAAY,cAAc,EAAE,WAAW,CAAC,GAEnE,EAAE,WAAW,UAAU,iBAAiB;AACjD;AAUA,eAAsB,qBACpB,WACA,OAC6B;AAC7B,MAAM,EAAE,mBAAAC,mBAAkB,IAAI,MAAM,OAAO,mCAAmC,GACxE,YAAY,MAAMA,mBAAkB,WAAW,SAAS,GAExD,YAAgC,CAAC;AAEvC,WAAW,cAAc,UAAU;AACjC,cAAU,KAAK;AAAA,MACb,MAAM;AAAA,MACN,cAAc,WAAW;AAAA,MACzB,cAAc,WAAW;AAAA,MACzB,YAAY,oBAAI,IAAI,CAAC,WAAW,YAAY,CAAC;AAAA,MAC7C,aAAa,CAAC,WAAW,YAAY;AAAA,MACrC;AAAA,MACA,YAAY,WAAW;AAAA,MACvB,aAAa,WAAW;AAAA,IAC1
B,CAAC;AAIH,MAAM,eAAe,IAAI,IAAI,oBAAoB,IAAI,CAAC,GAAG,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC;AACtE,mBAAU,KAAK,CAAC,GAAG,MAAM;AACvB,QAAM,SAAS,aAAa,IAAI,EAAE,YAAmB,KAAK,OACpD,SAAS,aAAa,IAAI,EAAE,YAAmB,KAAK;AAC1D,WAAI,WAAW,SAAe,SAAS,SAChC,EAAE,aAAa,cAAc,EAAE,YAAY;AAAA,EACpD,CAAC,GAEM,EAAE,WAAW,UAAU,CAAC,EAAE;AACnC;;;ACrKO,SAAS,kBAAkB,WAA2D;AAC3F,SAAO,UAAU,SAAS,aAAa,UAAU,UAAU,QAAQ,UAAU,SAAS;AACxF;AAMO,SAAS,qBAAqB,WAAwC;AAC3E,MAAI,UAAU,SAAS,WAAW;AAChC,QAAM,MAAM,UAAU,SAChB,UAAU,IAAI,WAAW,IAAI,YAAY,UAAU,MAAM,IAAI,OAAO,MAAM,IAC1EC,YAAW,eAAe,IAAI,KAAK;AACzC,WAAO,GAAG,IAAI,WAAW,GAAG,OAAO,cAAc,IAAI,aAAa,cAAcA,SAAQ;AAAA,EAC1F;AACA,MAAM,IAAI,UAAU,UACd,UAAU,EAAE,cAAc,UAAU,EAAE,WAAW,KAAK,IACtD,WAAW,eAAe,EAAE,KAAK;AACvC,SAAO,GAAG,EAAE,YAAY,KAAK,EAAE,YAAY,GAAG,OAAO,IAAI,QAAQ;AACnE;AAEO,SAAS,2BAA2B,WAAwC;AACjF,MAAM,QAAQ,UAAU,SAAS,YAC7B,UAAU,QAAS,cACnB,UAAU,SAAU;AACxB,MAAI,MAAM,WAAW,EAAG,QAAO;AAC/B,MAAM,eAAe,MAAM,MAAM,GAAG,CAAC,GAC/B,YAAY,MAAM,SAAS,aAAa,QAC1C,OAAO,aAAa,KAAK;AAAA,CAAI;AACjC,SAAI,YAAY,MACd,QAAQ;AAAA,GAAM,SAAS,UAElB;AACT;AAcO,SAAS,gBAAgB,WAA+B,MAAqC;AAClG,MAAM,aAAoC,CAAC,GACrC,YAAY,KAAK,YAAY;AAEnC,WAAW,YAAY,UAAU;AAC/B,IAAI,SAAS,aAAa,YAAY,MAAM,aAC1C,WAAW,KAAK,EAAE,MAAM,YAAY,SAAS,CAAC;AAIlD,WAAW,OAAO,UAAU;AAC1B,IAAI,IAAI,gBAAgB,QACtB,WAAW,KAAK,EAAE,MAAM,WAAW,SAAS,IAAI,CAAC;AAIrD,SAAO;AACT;AAiBA,eAAsB,cACpB,MACA,WACA,OAC2B;AAC3B,MAAM,YAAY,MAAM,wBAAwB,WAAW,KAAK;AAChE,SAAO,EAAE,YAAY,gBAAgB,WAAW,IAAI,EAAE;AACxD;AAiBA,eAAsB,kBACpB,MACA,WACA,OAC2B;AAC3B,MAAM,YAAY,MAAM,qBAAqB,WAAW,KAAK;AAC7D,SAAO,EAAE,YAAY,gBAAgB,WAAW,IAAI,EAAE;AACxD;;;ACzGA,eAAsB,eACpB,SACA,UACqD;AACrD,MAAM,UAAsD,CAAC,GACvD,MAAM,QAAQ,aAAa;AAGjC,MAAI,CAAC,QAAQ;AACX,QAAI;AACF,UAAM,UAAU,MAAM,uBAAuB,EAAE,QAAQ,IAAO,IAAI,CAAC,GAC7D,SAAS,MAAM,SAAS,EAAE,OAAO,WAAW,QAAQ,CAAC;AAC3D,cAAQ,KAAK,EAAE,OAAO,WAAW,OAAO,CAAC;AAAA,IAC3C,SAAS,OAAO;AACd,MAAI,QAAQ,eACV,QAAQ,aAAa,WAAW,KAAK,IAErC,OAAO,MAAM,mCAAmC;AAAA,QAC9C,QAAQ,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,MAC/D,CAAC;AAAA,IAEL;AAIF,MAAI,CAAC,QAAQ;AACX,QAAI;AACF,UAAM,UAAU,MAAM,uBAAuB,EAAE
,QAAQ,IAAM,IAAI,CAAC,GAC5D,SAAS,MAAM,SAAS,EAAE,OAAO,UAAU,QAAQ,CAAC;AAC1D,cAAQ,KAAK,EAAE,OAAO,UAAU,OAAO,CAAC;AAAA,IAC1C,SAAS,OAAO;AACd,MAAI,QAAQ,eACV,QAAQ,aAAa,UAAU,KAAK,IAEpC,OAAO,MAAM,kCAAkC;AAAA,QAC7C,QAAQ,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,MAC/D,CAAC;AAAA,IAEL;AAGF,SAAO;AACT;AAMA,eAAsB,mBACpB,SACA,UACc;AAEd,UADqB,MAAM,eAAe,SAAS,QAAQ,GACvC,QAAQ,QAAM,GAAG,MAAM;AAC7C;;;AC/CA,eAAsB,aACpB,MACA,YACA,cACA,UAAiC,CAAC,GAClC,QACA,QACc;AACd,MAAM;AAAA,IACJ,kBAAkB,IAAI,IAAI;AAAA;AAAA,IAC1B,kBAAkB;AAAA,GAAM,IAAI;AAAA;AAAA,IAC5B,gBAAgB;AAAA,IAChB,QAAQ;AAAA,EACV,IAAI,SAEE,MAAM,UAAU,cAAc,GAC9B,MAAM,UAAU,cAAc;AAGpC,MAAI,WAAW,WAAW;AACxB,UAAM,IAAI;AAAA,MACR,gBAAgB,QAAQ,eAAe,IAAI;AAAA,IAC7C;AAIF,MAAI,WAAW,WAAW;AACxB,WAAO,CAAC,WAAW,CAAC,CAAC;AAIvB,MAAM,UAAU,WAAW,IAAI,CAAC,GAAG,MAAM;AACvC,QAAM,SAAS,aAAa,GAAG,CAAC;AAChC,WAAO;AAAA,MACL,OAAO,OAAO;AAAA,MACd,aAAa,OAAO;AAAA,MACpB,OAAO;AAAA,IACT;AAAA,EACF,CAAC;AAID,MAFA,IAAI,KAAK,gBAAgB,QAAQ,eAAe,IAAI,EAAE,KAAK,CAAC,GAExD,OAAO;AACT,QAAM,kBAAkB,MAAM,IAAI;AAAA,MAChC;AAAA,MACA;AAAA,IACF;AAEA,WAAI,CAAC,mBAAmB,gBAAgB,WAAW,IAC1C,CAAC,IAEH,gBAAgB,IAAI,OAAK,WAAW,CAAC,CAAC;AAAA,EAC/C,OAAO;AAEL,QAAM,gBAAgB,MAAM,IAAI;AAAA,MAC9B;AAAA,MACA;AAAA,IACF;AAEA,WAAI,iBAAkB,OACb,CAAC,IAEH,CAAC,WAAW,aAAa,CAAC;AAAA,EACnC;AACF;;;AClEO,SAAS,qBAAqB,OAA2C;AAE9E,MACE,UAAU,OACV,UAAU,OACV,MAAM,WAAW,IAAI,KACrB,MAAM,WAAW,KAAK,KACtB,MAAM,WAAW,GAAG,KACpB,MAAM,WAAW,IAAI;AAErB,WAAO,EAAE,MAAM,gBAAgB;AAIjC,MAAI,MAAM,SAAS,GAAG;AACpB,WAAO,EAAE,MAAM,QAAQ;AAIzB,MAAM,QAAQ,mBAAmB,KAAK;AACtC,SAAI,MAAM,cAAc,MAAM,OACrB,EAAE,MAAM,gBAAgB,MAAM,IAIhC,EAAE,MAAM,QAAQ;AACzB;AA+BA,eAAsB,oBACpB,OACA,cACA,SACA,KAC2B;AAC3B,MAAM,QAAQ,mBAAmB,KAAK,GAGhC,SAA2B,CAAC;AAElC,QAAM;AAAA,IACJ;AAAA,IACA,OAAO,EAAE,OAAO,QAAQ,MAAM;AAC5B,UAAM,SAAS,MAAM,cAAc,MAAM,MAAM,QAAQ,WAAW,KAAK;AACvE,UAAI,OAAO,WAAW,SAAS,GAAG;AAEhC,YAAM,EAAE,MAAM,IAAI,MAAM,mBAAmB,QAAQ,SAAS;AAC5D,iBAAW,KAAK,OAAO,YAAY;AACjC,cAAI,mBACE,UAAU,EAAE,SAAS,aAAa,EAAE,UAAU,cAAc,EAAE,SAAS;AAC7E,cAAI,SAAS;AACX,gBAAM,WAAW,MAAM,SAAS,OAAO;AACvC,YAAI,UAAU,SACZ
,oBAAoB,oBAAoB,SAAS,MAAM,QAAQ,SAAS,EAAE;AAAA,UAE9E;AACA,iBAAO,KAAK,EAAE,WAAW,GAAG,WAAW,QAAQ,WAAW,kBAAkB,CAAC;AAAA,QAC/E;AAAA,MACF;AACA,aAAO,CAAC,IAAI;AAAA,IACd;AAAA,EACF;AAGA,MAAM,aAAa,SAAS,cAAc,MAAM,YAC5C,WAAW;AAUf,MATI,eACF,WAAW,SAAS;AAAA,IAClB,OAAK,EAAE,UAAU,SAAS,cAAc,EAAE,UAAU,UAAU,iBAAiB;AAAA,EACjF,IAME,CAAC,cAAc,SAAS,SAAS,GAAG;AACtC,QAAM,oBAAoB,SAAS,OAAO,OAAK,EAAE,UAAU,SAAS,SAAS;AAC7E,IAAI,kBAAkB,SAAS,MAC7B,WAAW;AAAA,EAEf;AAGA,MAAI,SAAS,mBAAmB,SAAS,SAAS,GAAG;AACnD,QAAM,YAAY,SAAS,OAAO,OAAK,kBAAkB,EAAE,SAAS,MAAM,QAAQ,eAAe;AACjG,IAAI,UAAU,SAAS,MACrB,WAAW;AAAA,EAEf;AAGA,MAAM,MAAM,cAAc,GAAG,GACvB,MAAM,cAAc,GAAG;AAoB7B,SAlBiB,MAAM;AAAA,IACrB;AAAA,IACA;AAAA,IACA,CAAC,OAAO;AAAA,MACN,OAAO,qBAAqB,EAAE,SAAS;AAAA,MACvC,aAAa,2BAA2B,EAAE,SAAS;AAAA,MACnD,OAAO;AAAA,IACT;AAAA,IACA;AAAA,MACE,iBAAiB,SAAS,mBAAmB,IAAI,KAAK;AAAA,mEAA+F,KAAK;AAAA;AAAA,MAC1J,iBAAiB,SAAS;AAAA,MAC1B,eAAe,SAAS,iBAAiB;AAAA,MACzC,OAAO,SAAS;AAAA,IAClB;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAGF;;;ACzKO,SAAS,cAAc,SAAqD;AACjF,SAAO,OAAO,WAAY,WAAW,UAAU,QAAQ;AACzD;AAKO,SAAS,iBAAiB,SAAmF;AAClH,SAAO,OAAO,WAAY;AAC5B;AAKO,SAAS,gBACd,SACsC;AACtC,SACE,OAAO,WAAY,YACnB,CAAC,CAAC,QAAQ,SACV,MAAM,QAAQ,QAAQ,IAAI,KAC1B,QAAQ,KAAK,SAAS;AAE1B;AAKO,SAAS,mBACd,UACA,UACwC;AACxC,MAAM,WAAW,oBAAI,IAAgD;AACrE,WAAW,KAAK;AACd,aAAS,IAAI,cAAc,CAAC,GAAG,CAAC;AAElC,WAAW,KAAK,UAAU;AACxB,QAAM,KAAK,cAAc,CAAC,GACpB,QAAQ,SAAS,IAAI,EAAE;AAC7B,KAAI,CAAC,SAAU,OAAO,SAAU,YAAY,OAAO,KAAM,aACvD,SAAS,IAAI,IAAI,CAAC;AAAA,EAEtB;AACA,SAAO,MAAM,KAAK,SAAS,OAAO,CAAC;AACrC;AA0BO,SAAS,mBACd,OACA,UACsD;AAEtD,MAAI,SAAS,KAAK;AAChB,WAAO,EAAE,KAAK,OAAO,OAAO,SAAS,KAAK,EAAE;AAI9C,WAAW,OAAO,OAAO,KAAK,QAAQ;AACpC,QAAI,0BAA0B,KAAK,KAAK;AACtC,aAAO,EAAE,KAAK,OAAO,SAAS,GAAG,EAAE;AAMvC,MAAI,CAAC,gBAAgB,KAAK,GAAG;AAC3B,QAAM,SAAS,MAAM,MAAM,YAAY,GACjC,mBAAoE,CAAC;AAC3E,aAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,QAAQ;AAChD,MAAI,gBAAgB,GAAG,KAAK,IAAI,YAAY,EAAE,SAAS,MAAM,KAC3D,iBAAiB,KAAK,EAAE,KAAK,MAAM,CAAC;AAGxC,QAAI,iBAAiB,WAAW;AAC9B,aAAO,iBAAiB,CAAC;AAAA,EAG7B;AAGA,MAAM,aAAa,8BAA8B,KAAK;AACtD,M
AAI,eAAe,MAAM,YAAY,GAAG;AACtC,QAAI,SAAS,UAAU;AACrB,aAAO,EAAE,KAAK,YAAY,OAAO,SAAS,UAAU,EAAE;AAExD,aAAW,OAAO,OAAO,KAAK,QAAQ;AACpC,UAAI,0BAA0B,KAAK,UAAU;AAC3C,eAAO,EAAE,KAAK,OAAO,SAAS,GAAG,EAAE;AAAA,EAGzC;AAIA,MADa,qBAAqB,KAAK,EAC9B,SAAS,gBAAgB;AAChC,QAAM,iBAAiB,QAAQ;AAC/B,aAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,QAAQ,GAAG;AACnD,UAAM,QAAQ,MAAM;AACpB,UAAK;AACL,iBAAW,aAAa,OAAO,KAAK,KAAK;AACvC,cAAI,UAAU,WAAW,cAAc,KAAK,cAAc;AACxD,mBAAO,EAAE,KAAK,MAAM;AAAA;AAAA,IAG1B;AAAA,EACF;AAEA,SAAO;AACT;;;AjB5EA,eAAsB,uBACpB,aACA,SAC0B;AAC1B,MAAM,SAA0B;AAAA,IAC9B,SAAS;AAAA,IACT,gBAAgB;AAAA,IAChB,cAAc;AAAA,IACd,aAAa,CAAC;AAAA,IACd,aAAa,CAAC;AAAA,IACd,WAAW,CAAC;AAAA,IACZ,QAAQ,CAAC;AAAA,IACT,UAAU,CAAC;AAAA,EACb,GAEM,WAAW,QAAQ,YAAY,mBAAmB;AAExD,WAAW,CAAC,MAAM,OAAO,KAAK;AAC5B,aAAW,aAAa;AACtB,UAAI;AACF,YAAM,eAAe,MAAM;AAAA,UACzB;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QACF;AAeA,YAZI,aAAa,aACf,OAAO,kBAGL,aAAa,WAAW,CAAC,QAAQ,UACnC,OAAO,gBAGL,aAAa,cACf,OAAO,YAAY,KAAK,aAAa,UAAU,GAG7C,aAAa,aAAa;AAC5B,cAAM,MAAM,aAAa,cAAc;AACvC,UAAK,OAAO,YAAY,GAAG,MACzB,OAAO,YAAY,GAAG,IAAI,CAAC,IAE7B,OAAO,YAAY,GAAG,IAAI;AAAA,YACxB,OAAO,YAAY,GAAG;AAAA,YAAG,aAAa;AAAA,UACxC;AAAA,QACF;AAEA,QAAI,aAAa,aACf,OAAO,UAAU,KAAK,GAAG,aAAa,SAAS,GAG7C,aAAa,YACf,OAAO,SAAS,KAAK,GAAG,aAAa,QAAQ,GAG1C,aAAa,YAChB,OAAO,UAAU,IACb,aAAa,SACf,OAAO,OAAO,KAAK;AAAA,UACjB;AAAA,UACA,YAAY;AAAA,UACZ,OAAO,aAAa;AAAA,UACpB,SAAS,aAAa,MAAM;AAAA,QAC9B,CAAC;AAAA,MAIP,SAAS,OAAO;AACd,eAAO,UAAU,IACjB,OAAO,OAAO,KAAK;AAAA,UACjB;AAAA,UACA,YAAY;AAAA,UACZ;AAAA,UACA,SAAS,oBAAoB,SAAS,KAAM,MAAgB,OAAO;AAAA,QACrE,CAAC;AAAA,MACH;AAIJ,SAAO;AACT;AA8BA,eAAe,kBACb,MACA,WACA,SACA,UACiC;AACjC,MAAM,YAAYC,OAAK,QAAQ,aAAa,SAAS,GAG/C,eAAe,oBAAoB,KAAK,IAAI,GAC5C,eAAe,oBAAoB,WAAW,YAAY,GAG1D,gBAA6B;AAAA,IACjC,GAAG;AAAA,IACH,WAAW;AAAA,MACT,GAAG,QAAQ;AAAA,MACX,YAAY;AAAA,MACZ,WAAWC,SAAQ,SAAS;AAAA,MAC5B,YAAYC,WAAS,SAAS;AAAA,MAC9B,GAAI,eAAe,EAAE,aAAa,IAAI,CAAC;AAAA,IACzC;AAAA,EACF,GAEM,eAAe;AAAA,IACnB,KAAK;AAAA,IACL,CAAC,OAAO,wBAAwB,IAAI,aAAa;AAAA,EACnD,KAAK,IACC,oBAAoB,eAAe,cAAc,eAAe,YAAY,GAC5E,YAA
Y;AAAA,IAChB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,GACM,YAAYC,UAAS,QAAQ,eAAe,SAAS,GAErD,gBAAgB,2BAA2B,SAAS,GAEpD,iBADc,QAAQ,iBAAiB,IAAI,aAAa,KACxB,eAGlC,kBACE,eAAe,KAAK;AAC1B,MAAI,OAAO,gBAAiB,YAAY,iBAAiB,QAAQ,EAAE,aAAa;AAC9E,QAAI,MAAM,QAAQ,YAAY,GAAG;AAC/B,UAAM,QAAQ,aAAa,CAAC;AAC5B,MAAI,OAAO,SAAU,YAAY,UAAU,QAAQ,YAAY,UAC7D,mBAAoB,MAA8B;AAAA,IAEtD,MAAO,CAAI,YAAY,iBACrB,mBAAoB,aAAqC;AAK7D,MAAM,eAAqB;AAAA,IACzB,GAAG;AAAA,IACH,MAAM;AAAA,IACN,IAAI;AAAA,IACJ,GAAI,mBAAmB,EAAE,cAAc,iBAAiB,IAAI,CAAC;AAAA,EAC/D,GAGM,aAAa,MAAM,SAAS,YAAY,cAAc,aAAa;AAKzE,MAFmB,WAAW,UAAU,SAAS,+BAA+B;AAG9E,WAAO;AAAA,MACL,SAAS;AAAA,MACT,WAAW;AAAA,MACX,SAAS;AAAA,IACX;AAGF,MAAI,CAAC,WAAW;AACd,WAAO;AAAA,MACL,SAAS;AAAA,MACT,WAAW;AAAA,MACX,SAAS;AAAA,MACT,OAAO,WAAW,SAAS,IAAI,MAAM,uBAAuB;AAAA,IAC9D;AAIF,MAAM,UAAS,OAAO,WAAW,UAAW,UACxC,WAAW;AAGf,MAAI,OAAO,UAAW;AACpB,WAAO;AAAA,MACL,SAAS;AAAA,MACT,WAAW;AAAA,MACX,SAAS;AAAA,IACX;AAIF,MAAM,sBADyBA,UAAS,QAAQ,eAAe,MAAM,EAClB,QAAQ,OAAO,GAAG,GAG/D,qBACH,WAAW,UAAU,UAAU,WAAW,UAAU,cACrD,MAAM,QAAQ,WAAW,IAAI,GAEzB,cAAsD,CAAC;AAE7D,EAAI,oBACF,YAAY,KAAK;AAAA,IACf,QAAQ;AAAA,IACR,OAAO,WAAW;AAAA,IAClB,MAAM,WAAW;AAAA,IACjB,MAAM,WAAW;AAAA,IACjB,YAAY,WAAW;AAAA,EACzB,CAAC,IACQ,WAAW,cACpB,YAAY,KAAK;AAAA,IACf,QAAQ;AAAA,IACR,MAAM,WAAW;AAAA,IACjB,YAAY,WAAW;AAAA,EACzB,CAAC,IAED,YAAY,KAAK,mBAAmB;AAItC,MAAM,YAAY,WAAW,WAAW,IAAI,eAAa;AAAA,IACvD,MAAM,SAAS;AAAA,IACf,QAAQ,SAAS;AAAA,IACjB,QAAQ,SAAS;AAAA,EACnB,EAAE,KAAK,CAAC,GAGF,qBAAqB,WAAW,UAAU;AAAA,IAC9C,OAAK,MAAM,mCAAmC,MAAM;AAAA,EACtD;AAEA,SAAO;AAAA,IACL,SAAS;AAAA,IACT,WAAW;AAAA,IACX,SAAS;AAAA,IACT,YAAY;AAAA,IACZ;AAAA,IACA,YAAY;AAAA,IACZ;AAAA,IACA,GAAI,sBAAsB,mBAAmB,SAAS,IAAI,EAAE,UAAU,mBAAmB,IAAI,CAAC;AAAA,EAChG;AACF;AAYO,SAAS,sBACd,eACA,aACA,WACA,SACQ;AACR,MAAM,uBAAuBA,UAAS,QAAQ,aAAa,aAAa;AAGxE,MAAI,UAAU,SAAS,GAAG,GAAG;AAE3B,QAAI,UAAU,SAAS,IAAI,GAAG;AAC5B,UAAM,YAAY;AAAA,QAChB;AAAA,QACA;AAAA,QACA;AAAA,MACF;AACA,aAAOH,OAAK,QAAQ,eAAe,SAAS;AAAA,IAC9C;AAGA,QAAM,YAAYI,SAAQ,aAAa,GACjC,aAAaF,WAAS,eAAe,SAAS,GAE9C,UAAU,UAAU,MAAM,GAAG,GAC7B,WAAW,Q
AAQ,CAAC,GACpB,WAAW,QAAQ,CAAC,KAAK,IAEzB,YAAY,SAAS,WAAW,GAAG,IAAI,WAAY,YAAY,UAC/D,iBAAiB,aAAa,WAG9B,yBAAyB,gCAAgC,cAAc;AAE7E,WAAOF,OAAK,QAAQ,eAAe,WAAW,sBAAsB;AAAA,EACtE;AAGA,SAAOA,OAAK,QAAQ,eAAe,SAAS;AAC9C;AAmDO,SAAS,0BAA0B,SAA6C;AACrF,MAAM,aAA8B;AAAA,IAClC,SAAS;AAAA,IACT,gBAAgB;AAAA,IAChB,cAAc;AAAA,IACd,aAAa,CAAC;AAAA,IACd,aAAa,CAAC;AAAA,IACd,WAAW,CAAC;AAAA,IACZ,QAAQ,CAAC;AAAA,IACT,UAAU,CAAC;AAAA,EACb;AAEA,WAAW,UAAU,SAAS;AAC5B,eAAW,kBAAkB,OAAO,gBACpC,WAAW,gBAAgB,OAAO,cAClC,WAAW,YAAY,KAAK,GAAG,OAAO,WAAW,GACjD,WAAW,UAAU,KAAK,GAAG,OAAO,SAAS,GAC7C,WAAW,OAAO,KAAK,GAAG,OAAO,MAAM;AAGvC,aAAW,CAAC,QAAQ,OAAO,KAAK,OAAO,QAAQ,OAAO,WAAW;AAC/D,iBAAW,YAAY,MAAM,IAAI;AAAA,QAC/B,WAAW,YAAY,MAAM,KAAK,CAAC;AAAA,QAAG;AAAA,MACxC;AAGF,IAAK,OAAO,YACV,WAAW,UAAU;AAAA,EAEzB;AAEA,SAAO;AACT;;;AkBzcA,SAAS,YAAAK,kBAAgB;AAiBlB,SAAS,wBACd,aACA,UACqB;AACrB,MAAM,WAAW,oBAAI,IAAoB,GAGnC,aAAuB,CAAC;AAC9B,WAAW,WAAW,YAAY,OAAO;AACvC,eAAW,KAAK,GAAG,OAAO;AAE5B,MAAM,cAAc,iBAAiB,UAAU;AAE/C,WAAW,CAAC,MAAM,OAAO,KAAK,aAAa;AACzC,QAAM,yBAAmC,CAAC;AAE1C,aAAW,aAAa,SAAS;AAE/B,UAAI,CAAC,gCAAgC,WAAW,QAAQ,KACpD,UAAU,SAAS,GAAG,KACtB,UAAU,MAAM,GAAG,EAAE,UAAU,GAAG;AACpC,YAAM,QAAQC,WAAS,SAAS,EAAE,MAAM,GAAG,GACrC,mBAAmB,MAAM,MAAM,SAAS,CAAC;AAC/C,YAAI,qBAAqB,YAAY,aAAa,gBAAgB;AAChE;AAAA,MAEJ;AAGA,MAAI,wBAAwB,WAAW,UAAU,YAAY,WAAW,KAIxE,uBAAuB,KAAK,SAAS;AAAA,IACvC;AAEA,IAAI,uBAAuB,SAAS,KAClC,SAAS,IAAI,MAAM,sBAAsB;AAAA,EAE7C;AAEA,SAAO;AACT;;;AC7CA,SAAS,YAAAC,iBAAgB;AAwClB,SAAS,uBACd,KACA,YAAgC,WACnB;AACb,MAAM,cAAc,sBAAsB,IAAI,UAAU,IAAI,aAAa,GACnE,eAAe,kCAAkC,IAAI,WAAW,GAGhE,iBAAiB,IAAI,kBAAkB,eAAe,YAAY;AAExE,SAAO;AAAA,IACL,eAAe,IAAI;AAAA,IACnB,aAAa,IAAI;AAAA,IACjB,UAAU,IAAI;AAAA,IACd,aAAa,IAAI;AAAA,IACjB;AAAA,IACA,WAAW;AAAA,MACT,MAAM,IAAI;AAAA,MACV,SAAS,IAAI;AAAA,MACb,UAAU,IAAI;AAAA,MACd,UAAU,YAAY;AAAA,MACtB,SAAS,uBAAuB,WAAW;AAAA,MAC3C,UAAU,IAAI;AAAA,MACd,gBAAgB,IAAI;AAAA,MACpB,QAAQ;AAAA,MACR,gBAAgB;AAAA,MAChB,YAAY,IAAI;AAAA,MAChB;AAAA,IACF;AAAA,IACA,QAAQ,IAAI;AAAA,EACd;AACF;AAaA,eAAsB,yBACpB,OACA,KACA,SACA,MAC8B;AAE9B,MAAM,cAAc,MAA
M,oBAAoB,OAAO,IAAI,aAAa,OAAO,GAGvE,mBAAmB;AAAA,IACvB;AAAA,IACA,IAAI;AAAA,IACJ,IAAI;AAAA,EACN,GAGM,mBAAmB,wBAAwB,kBAAkB,IAAI,QAAQ;AAG/E,MAAI,MAAM;AACR,aAAW,CAAC,MAAM,OAAO,KAAK,kBAAkB;AAC9C,UAAM,OAAO,QAAQ,OAAO,OAAK,KAAK,gBAAiB,IAAI,CAAC,CAAC;AAC7D,MAAI,KAAK,SAAS,IAChB,iBAAiB,IAAI,MAAM,IAAI,IAE/B,iBAAiB,OAAO,IAAI;AAAA,IAEhC;AAGF,SAAO;AACT;AAUA,eAAsB,mBACpB,aACA,SAC0B;AAC1B,SAAO,uBAAuB,aAAa,OAAO;AACpD;AAYA,eAAsB,uBACpB,KACA,MAC+B;AAE/B,MAAI,CAAC,kBAAkB,IAAI,UAAU,IAAI,aAAa;AACpD,WAAO;AAAA,MACL,iBAAiB,qBAAqB;AAAA,MACtC,qBAAqB,oBAAI,IAAI;AAAA,IAC/B;AAIF,MAAM,QAAQ,mBAAmB,IAAI,UAAU,IAAI,aAAa;AAChE,MAAI,MAAM,WAAW;AACnB,WAAO;AAAA,MACL,iBAAiB,qBAAqB;AAAA,MACtC,qBAAqB,oBAAI,IAAI;AAAA,IAC/B;AAIF,MAAM,UAAU,uBAAuB,KAAK,SAAS,GAG/C,sBAAsB,MAAM;AAAA,IAChC;AAAA,IAAO;AAAA,IAAK;AAAA,IAAS;AAAA,EACvB;AAKA,SAAO,EAAE,iBAFe,MAAM,mBAAmB,qBAAqB,OAAO,GAEnD,oBAAoB;AAChD;AAUO,SAAS,uBACd,aACA,gBACA,aACqB;AACrB,MAAI,CAAC;AACH,WAAO;AAGT,MAAM,oBAAoB,eAAe,QAAQ,OAAO,GAAG,GACrD,kBAAkB,oBAAI,IAAoB;AAEhD,WAAW,CAAC,MAAM,OAAO,KAAK,YAAY,QAAQ,GAAG;AACnD,QAAM,WAAW,QAAQ,OAAO,gBAAc;AAC5C,UAAM,QAAQ,WAAW,WAAW,GAAG,KAAK,kBAAkB,KAAK,UAAU,GACvE,mBAAmB,WAAW,QAAQ,OAAO,GAAG,GAChD,gBACJ,QAAQC,UAAS,aAAa,UAAU,IAAI,kBAC5C,QAAQ,OAAO,GAAG;AAEpB,aAAO,iBAAiB,cAAc,iBAAiB;AAAA,IACzD,CAAC;AAED,IAAI,SAAS,SAAS,KACpB,gBAAgB,IAAI,MAAM,QAAQ;AAAA,EAEtC;AAEA,SAAO;AACT;AAMA,SAAS,uBAAwC;AAC/C,SAAO;AAAA,IACL,SAAS;AAAA,IACT,gBAAgB;AAAA,IAChB,cAAc;AAAA,IACd,aAAa,CAAC;AAAA,IACd,aAAa,CAAC;AAAA,IACd,WAAW,CAAC;AAAA,IACZ,QAAQ,CAAC;AAAA,IACT,UAAU,CAAC;AAAA,EACb;AACF;;;ACxOO,IAAe,eAAf,MAA4D;AAAA;AAAA;AAAA;AAAA,EAcvD,oBAAuC;AAC/C,WAAO,kBAAkB;AAAA,EAC3B;AAAA;AAAA;AAAA;AAAA,EAKU,mBAAmB,UAAoB,KAAa;AAC5D,WAAO,mBAAmB,UAAU,GAAG;AAAA,EACzC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOU,iBACR,SACA,YAAgC,WACnB;AACb,QAAM,cAAqC;AAAA,MACzC,aAAa,QAAQ;AAAA,MACrB,aAAa,QAAQ;AAAA,MACrB,eAAe,QAAQ;AAAA,MACvB,UAAU,QAAQ;AAAA,MAClB,gBAAgB,QAAQ;AAAA,MACxB,UAAU,QAAQ;AAAA,MAClB,QAAQ,QAAQ;AAAA,MAChB,mBAAmB,QAAQ;AAAA,MAC3B,gBAAgB,QAAQ;AAAA,IAC1B;AACA,WAAO,uBAAuB,aAAa,SAAS;AAAA,EACtD;AAAA;A
AAA;AAAA;AAAA,EAKU,qBAAqB,SAAmC;AAAA,EAElE;AAAA;AAAA;AAAA;AAAA,EAKU,WAAW,QAA2B,SAAmC;AACjF;AAAA,MACE;AAAA,MACA,QAAQ;AAAA,MACR,QAAQ;AAAA,MACR,QAAQ,UAAU;AAAA,IACpB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKU,kBACR,SACA,OACA,SACmB;AACnB,WAAO;AAAA,MACL,SAAS;AAAA,MACT,gBAAgB;AAAA,MAChB,cAAc;AAAA,MACd,WAAW,CAAC;AAAA,MACZ,QAAQ,CAAC;AAAA,QACP,MAAM,EAAE,MAAM,QAAQ,aAAa,IAAI,QAAQ,cAAc;AAAA,QAC7D,YAAY,QAAQ;AAAA,QACpB;AAAA,QACA;AAAA,MACF,CAAC;AAAA,MACD,aAAa,CAAC;AAAA,MACd,aAAa,CAAC;AAAA,IAChB;AAAA,EACF;AAEF;;;ACvGA,SAAS,YAAAC,YAAU,WAAAC,WAAS,QAAAC,QAAM,YAAAC,kBAAgB;AAClD,SAAS,YAAYC,WAAU;AAuB/B,SAAS,UAAAC,eAAc;AACvB,SAAS,WAAAC,UAAS,MAAAC,WAAU;AAC5B,SAAS,aAAAC,kBAAiB;AAsDnB,IAAM,oBAAN,MAAwB;AAAA,EAG7B,YAAY,eAAuB;AACjC,SAAK,gBAAgB;AAAA,EACvB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWA,MAAM,QACJ,KACA,SACA,gBACA,SAC2B;AAC3B,WAAO,KAAK,gCAAgC;AAAA,MAC1C,SAAS,IAAI,SAAS;AAAA,MACtB;AAAA,MACA,YAAY,CAAC,CAAC;AAAA,IAChB,CAAC;AAGD,QAAM,oBAAoB,WAAW,yBAAyB,GAAG,GAG3D,oBAAoB,mBAAmB,mBAAmB,cAAc,GAGxE,eAAe,IAAI,WAAW,oBAAoB,IAAI,KAAK;AAEjE,WAAO,MAAM,iBAAiB;AAAA,MAC5B,MAAM,aAAa;AAAA,MACnB,UAAU,aAAa;AAAA,MACvB,YAAY,aAAa;AAAA,MACzB,QAAQ,IAAI,UAAU,aAAa;AAAA,MACnC,iBAAiB,kBAAkB,eAAe;AAAA,IACpD,CAAC;AAGD,QAAM,WAAW,KAAK,cAAc,cAAc,cAAc;AAEhE,WAAK,SAAS,kBAWP,MAAM,KAAK,gBAAgB,KAAK,mBAAmB,UAAU,OAAO,KAVzE,OAAO,KAAK,sCAAsC,GAC3C;AAAA,MACL,SAAS;AAAA,MACT,kBAAkB;AAAA,MAClB,gBAAgB;AAAA,MAChB,QAAQ,CAAC;AAAA,IACX;AAAA,EAKJ;AAAA;AAAA;AAAA;AAAA,EAKA,cACE,cACA,gBACoB;AACpB,QAAM,SAA4B,CAAC,GAC7B,YAAY,gBAAgB,cAAc,cAAc;AAS9D,QAPA,OAAO,KAAK,iCAAiC;AAAA,MAC3C,YAAY,aAAa;AAAA,MACzB,gBAAgB,aAAa;AAAA,MAC7B;AAAA,MACA,iBAAiB;AAAA,IACnB,CAAC,GAEG,CAAC;AACH,aAAO;AAAA,QACL,QAAQ;AAAA,QACR,QAAQ;AAAA,QACR,QAAQ,CAAC;AAAA,QACT,iBAAiB;AAAA,MACnB;AAIF,QAAI,mBAAmB,YAAY,KAAK,aAAa,UAAU;AAC7D,UAAM,iBAAiB,aAAa,UAI9B,sBADc,sBAAsB,gBAAgB,KAAK,aAAa,EACpC,UAAU,CAAC,GAG7C,iBAAiB,CAAC,GAFE,qBAAqB,KAAK,aAAa,KAAK,CAAC,GAEzB,GAAG,mBAAmB;AAEpE,aAAO,KAAK,kCAAkC,eAAe,MAAM,iBAAiB;AAAA,QAClF;AAAA,QACA,WAAW,eAAe;AAAA,MAC5B,CAAC;AASD,UAAM,gBAAgB;AAEtB,aAAO,K
AAK;AAAA,QACV,MAAM;AAAA,QACN,aAAa,gBAAgB,cAAc;AAAA,QAC3C,OAAO;AAAA,QACP,UAAU;AAAA;AAAA,MACZ,CAAC;AAAA,IACH;AAMA,WAAO;AAAA,MACL,QAAQ;AAAA,MACR,QAAQ;AAAA,MACR;AAAA,MACA,iBAAiB;AAAA,IACnB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,gBACJ,KACA,SACA,UACA,SAC2B;AAC3B,QAAM,SAA2B;AAAA,MAC/B,SAAS;AAAA,MACT,QAAQ,CAAC;AAAA,IACX,GAEI,iBAAiB,KACjB,iBAAiB,SACf,SAAS,SAAS,UAAU,IAG9B,UAAyB;AAE7B,QAAI;AACF,gBAAU,MAAMC,SAAQC,OAAKC,QAAO,GAAG,eAAe,CAAC;AAEvD,eAAW,SAAS,SAAS,QAAQ;AACnC,eAAO,KAAK,+BAA+B,MAAM,IAAI,EAAE;AAEvD,YAAM,cAAc,MAAM,KAAK;AAAA,UAC7B;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA,SAAS;AAAA,QACX;AASA,YAPA,OAAO,OAAO,KAAK;AAAA,UACjB,OAAO,MAAM;AAAA,UACb,gBAAgB,YAAY;AAAA,UAC5B,SAAS,YAAY;AAAA,UACrB,OAAO,YAAY;AAAA,QACrB,CAAC,GAEG,CAAC,YAAY;AACf,wBAAO,UAAU,IACjB,OAAO,iBAAiB,gBACjB;AAIT,QAAI,YAAY,mBACd,iBAAiB;AAAA,UACf,GAAG;AAAA,UACH,OAAO,YAAY;AAAA;AAAA,UAEnB,SAAS;AAAA,YACP,MAAM;AAAA,YACN,UAAU;AAAA,YACV,YAAY;AAAA,YACZ,UAAU;AAAA,cACR,gBAAgB,YAAY,eAAe;AAAA,cAC3C,uBAAuB;AAAA,cACvB,mBAAmB,oBAAI,IAAI;AAAA,cAC3B,YAAY,YAAY,eAAe;AAAA,cACvC,aAAa;AAAA,gBACX,WAAW,YAAY,eAAe,MAAM,GAAG,CAAC,EAAE,IAAI,OAAK,EAAE,IAAI;AAAA,gBACjE,kBAAkB,CAAC;AAAA,cACrB;AAAA,YACF;AAAA,UACF;AAAA,QACF,GAGA,iBAAiB;AAAA,UACf;AAAA,UACA,EAAE,MAAM,aAAa,UAAU,OAAU;AAAA,UACzC,SAAS;AAAA,QACX;AAAA,MAEJ;AAEA,oBAAO,mBAAmB,gBAC1B,OAAO,iBAAiB,gBACjB;AAAA,IAET,SAAS,OAAO;AACd,oBAAO,MAAM,8BAA8B,EAAE,MAAM,CAAC,GACpD,OAAO,UAAU,IACjB,OAAO,iBAAiB,gBACxB,OAAO,OAAO,KAAK;AAAA,QACjB,OAAO;AAAA,QACP,gBAAgB;AAAA,QAChB,SAAS;AAAA,QACT,OAAQ,MAAgB;AAAA,MAC1B,CAAC,GACM;AAAA,IAET,UAAE;AAEA,UAAI;AACF,YAAI;AACF,gBAAMC,IAAG,SAAS,EAAE,WAAW,IAAM,OAAO,GAAK,CAAC;AAAA,QACpD,SAAS,OAAO;AACd,iBAAO,KAAK,oCAAoC,EAAE,SAAS,MAAM,CAAC;AAAA,QACpE;AAAA,IAEJ;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,sBACZ,SACA,SACmB;AAEnB,QAAI,OAAO,WAAY,YAAY,aAAa;AAC9C,YAAM,IAAI,MAAM,iFAAiF;AAEnG,QAAM,WAAW,MAAM,QAAQ,OAAO,IAAI,UAAU,CAAC,OAAO,GACtD,UAAoB,CAAC;AAG3B,oBAAgBC,WAAU,KAAqC;AAC7D,UAAM,UAAU,MAAMC,IAAG,QAAQ,KAAK,EAAE,eAAe,GAAK,CAAC;AAE7D,eAAW,SAAS,SAAS;AAC3B,YAAM,WAAWJ,OAAK,KAAK,M
AAM,IAAI;AAErC,QAAI,MAAM,YAAY,IACpB,OAAOG,WAAU,QAAQ,IAChB,MAAM,OAAO,MACtB,MAAM;AAAA,MAEV;AAAA,IACF;AAGE,mBAAiB,YAAYA,WAAU,OAAO,GAAG;AACjD,UAAM,eAAeE,WAAS,SAAS,QAAQ;AAG/C,eAAW,KAAK;AAGd,YAAIC,WAAU,cAAc,GAAG,EAAE,KAAK,GAAK,CAAC,GAAG;AAC7C,kBAAQ,KAAK,QAAQ;AACrB;AAAA,QACF;AAAA,IAEJ;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,aACZ,KACA,SACA,OACA,SACA,QACA,gBAMC;AACD,QAAI;AACF,UAAM,WAAW,mBAAmB,GAC9B,iBAAgC,CAAC,GACnC,iBAAiB,GACf,iBAAiB,oBAAI,IAAY,GAGjC,YAAYN,OAAK,SAAS,MAAM,IAAI,GACpC,cAAcA,OAAK,WAAW,IAAI,GAClC,aAAaA,OAAK,WAAW,KAAK;AACxC,YAAM,UAAU,WAAW,GAC3B,MAAM,UAAU,UAAU;AAG1B,eAAW,QAAQ,IAAI,OAAO;AAC5B,YAAM,WAAWA,OAAK,aAAa,KAAK,IAAI;AAC5C,cAAM,UAAUA,OAAK,UAAU,IAAI,CAAC,GACpC,MAAM,cAAc,UAAU,KAAK,OAAO;AAAA,MAC5C;AAMA,UAAM,eAAe,kCAAkC,IAAI,SAAS,IAAI,GAClE,cAA2B;AAAA,QAC/B,eAAe;AAAA;AAAA,QACf;AAAA,QACA,UAAU;AAAA;AAAA,QACV,aAAa,IAAI,SAAS;AAAA,QAC1B,WAAW;AAAA;AAAA,QACX,WAAW;AAAA,UACT,MAAM,IAAI,SAAS;AAAA,UACnB,SAAS,IAAI,SAAS,WAAW;AAAA,UACjC,UAAU;AAAA;AAAA,UACV,QAAQ,QAAQ,eAAe,YAAY;AAAA;AAAA,UAC3C,gBAAgB,QAAQ,eAAe,YAAY;AAAA;AAAA,UACnD;AAAA,UACA;AAAA,QACF;AAAA,QACA;AAAA,MACF;AAGA,eAAW,QAAQ,MAAM,OAAO;AAE9B,YAAM,gBAAgB,MAAM,KAAK;AAAA,UAC/B,KAAK;AAAA,UACL;AAAA,QACF;AAQA,YANA,OAAO,KAAK,yBAAyB;AAAA,UACnC,SAAS,KAAK;AAAA,UACd,YAAY,cAAc;AAAA,UAC1B,SAAS,cAAc,IAAI,OAAKK,WAAS,aAAa,CAAC,CAAC;AAAA,QAC1D,CAAC,GAEG,cAAc,WAAW,GAAG;AAC9B,iBAAO,MAAM,+BAA+B;AAAA,YAC1C,SAAS,KAAK;AAAA,YACd;AAAA,UACF,CAAC;AACD;AAAA,QACF;AAGA,iBAAW,cAAc,eAAe;AACtC,cAAM,iBAAiBA,WAAS,aAAa,UAAU;AACvD,yBAAe,IAAI,cAAc;AAMjC,cAAM,mBAAmB,oBAAoB,KAAK,IAAI,GAChD,eAAe,oBAAoB,gBAAgB,gBAAgB,GACnE,gBAA6B;AAAA,YACjC,GAAG;AAAA,YACH,WAAW;AAAA,cACT,GAAG,YAAY;AAAA,cACf,YAAY;AAAA,cACZ,WAAWE,UAAQ,cAAc;AAAA,cACjC,YAAYC,WAAS,cAAc;AAAA,cACnC,GAAI,eAAe,EAAE,aAAa,IAAI,CAAC;AAAA,YACzC;AAAA,UACF,GAEI,eAAqB;AAAA,YACvB,GAAG;AAAA,YACH,MAAM;AAAA,UACR;AAEA,cAAI,OAAO,KAAK,MAAO,YAAY,mBAAmB,KAAK,EAAE,GAAG;AAC9D,gBAAM,eAAe,OAAO,KAAK,MAAO,WACpC,KAAK,KACL,wBAAwB,KAAK,IAAI,aAAa,GAC5C,oBAAoB,eAAe,cAAc,eAAe,YAAY,GAC5E,YAAY;AAAA,cAChB;AAAA,cACA;AAAA,cACA;
AAAA,cACA;AAAA,YACF,GACM,YAAYH,WAAS,YAAY,SAAS;AAEhD,2BAAe;AAAA,cACb,GAAG;AAAA,cACH,IAAI;AAAA,YACN;AAAA,UACF;AAEA,cAAM,aAAa,MAAM,SAAS,YAAY,cAAc,aAAa;AAEzE,cAAI,CAAC,WAAW;AACd,mBAAO;AAAA,cACL,SAAS;AAAA,cACT;AAAA,cACA,OAAO,6BAA6B,cAAc,KAAK,WAAW,OAAO,OAAO;AAAA,YAClF;AAMF,cAHA,kBAGI,OAAO,WAAW,UAAW,UAAU;AACzC,gBAAM,aAAaA,WAAS,YAAY,WAAW,MAAM;AAGzD,gBAAI;AACF,kBAAM,UAAU,MAAM,aAAa,WAAW,MAAM;AAEpD,6BAAe,KAAK;AAAA,gBAClB,MAAM;AAAA,gBACN;AAAA,gBACA,UAAU;AAAA,cACZ,CAAC;AAAA,YACH,SAAS,OAAO;AACd,qBAAO,KAAK,iCAAiC;AAAA,gBAC3C,QAAQ,WAAW;AAAA,gBACnB;AAAA,cACF,CAAC;AAAA,YACH;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAEA,aAAO;AAAA,QACL,SAAS;AAAA,QACT;AAAA,QACA,gBAAgB,eAAe,SAAS,IAAI,iBAAiB;AAAA,MAC/D;AAAA,IAEF,SAAS,OAAO;AACd,oBAAO,MAAM,0BAA0B,EAAE,OAAO,MAAM,MAAM,MAAM,CAAC,GAC5D;AAAA,QACL,SAAS;AAAA,QACT,gBAAgB;AAAA,QAChB,OAAQ,MAAgB;AAAA,MAC1B;AAAA,IACF;AAAA,EACF;AACF;AAKO,SAAS,wBAAwB,eAA0C;AAChF,SAAO,IAAI,kBAAkB,aAAa;AAC5C;;;AC1iBA,SAAS,QAAAI,QAAM,YAAAC,YAAU,WAAAC,WAAS,YAAAC,YAAU,WAAAC,gBAAe;;;ACC3D,SAAS,WAAAC,WAAS,QAAAC,cAAY;AAC9B,SAAS,YAAYC,WAAU;;;ACR/B,YAAYC,YAAU;AAwCf,SAAS,oBAAoB,OAAuB;AACzD,SAAO,MAAM,SAAS,GAAG,IAAI,QAAQ,GAAG,KAAK;AAC/C;AAEO,SAAS,YAAY,QAAsD;AAChF,MAAM,aAAa,OAAO,KAAK,MAAM,EAAE,KAAK,GACtC,aAAoC,CAAC;AAC3C,WAAW,OAAO,YAAY;AAC5B,QAAM,SAAS,OAAO,GAAG,KAAK,CAAC;AAE/B,QADmB,OAAO,KAAK,OAAK,OAAO,KAAM,YAAY,MAAM,IAAI,GACvD;AAEd,UAAM,SAAS,CAAC,GAAG,MAAM,EAAE,KAAK,CAAC,GAAG,MAAM;AACxC,YAAM,UAAU,OAAO,KAAM,WAAW,IAAI,EAAE,QACxC,UAAU,OAAO,KAAM,WAAW,IAAI,EAAE;AAC9C,eAAO,QAAQ,cAAc,OAAO;AAAA,MACtC,CAAC,GAEK,OAAO,oBAAI,IAAY;AAC7B,iBAAW,GAAG,IAAI,OAAO,OAAO,UAAQ;AACtC,YAAM,SAAS,OAAO,QAAS,WAAW,OAAO,KAAK;AACtD,eAAI,KAAK,IAAI,MAAM,IAAU,MAC7B,KAAK,IAAI,MAAM,GACR;AAAA,MACT,CAAC;AAAA,IACH,OAAO;AAEL,UAAM,eAAe,CAAC,GAAG,IAAI,IAAI,MAAM,CAAC,EAAE,KAAK;AAC/C,iBAAW,GAAG,IAAI;AAAA,IACpB;AAAA,EACF;AACA,SAAO;AACT;AAoHO,SAAS,SAAS,KAAsB;AAC7C,SAAO,IAAI,SAAS,GAAG;AACzB;;;AD5KA,SAAS,OAAAC,YAAW;;;AELpB,IAAM,mBAAgC,IAAI;AAAA,EACxC,oBAAoB,EAAE,IAAI,OAAK,EAAE,MAAM,EAAE,OAAO,OAAO;AACzD;AAQO,SAAS,iBAAiB,UAAkB,MAAuB;AACxE,SAAO,
eAAe,QAAQ,MAAM;AACtC;AAOA,SAAS,cAAc,eAA2C;AAChE,MAAI,CAAC,cAAe,QAAO;AAE3B,MAAM,cAAc,cAAc,QAAQ,OAAO,GAAG,GAC9C,YAAY,YAAY,OAAO,OAAO;AAE5C,MAAI,YAAY,GAAG;AACjB,QAAM,SAAS,YAAY,MAAM,GAAG,SAAS,EAAE,QAAQ,OAAO,EAAE;AAChE,WAAI,YAAY,YAAY,CAAC,GAAG,MAAM,IAAI,IACjC,SAEF,OAAO,SAAS,GAAG,IAAI,OAAO,MAAM,GAAG,OAAO,YAAY,GAAG,CAAC,IAAI;AAAA,EAC3E;AAEA,MAAI,cAAc,IAAI;AAEpB,QAAM,YAAY,YAAY,YAAY,GAAG;AAC7C,WAAO,aAAa,IAAI,YAAY,MAAM,GAAG,SAAS,IAAI;AAAA,EAC5D;AAEA,SAAO;AACT;AAmBO,SAAS,yBACd,SACA,MACA,eACQ;AACR,MAAM,aAAa,QAAQ,QAAQ,OAAO,GAAG,GAGvC,UAAU,cAAc,aAAa,GAGvC;AACJ,MAAI,SAAS;AACX,QAAM,eAAe,QAAQ,SAAS,GAAG,IAAI,UAAU,GAAG,OAAO;AACjE,QAAI,WAAW,WAAW,YAAY;AACpC,kBAAY,WAAW,MAAM,aAAa,MAAM;AAAA,SAC3C;AAAA,UAAI,eAAe;AAExB,eAAO,GAAG,OAAO;AAGjB,kBAAY;AAAA;AAAA,EAEhB;AACE,gBAAY;AAGd,MAAM,QAAQ,UAAU,MAAM,GAAG,GAC3B,OAAO,MAAM,MAAM,SAAS,CAAC;AAGnC,MAAI,iBAAiB,IAAI,IAAI,KAAK,MAAM,UAAU,GAAG;AAQnD,QAAM,YAAY,MAAM,CAAS;AAEjC,QAAI,iBAAiB,WAAW,IAAI;AAClC,aAAO,UAAU,GAAG,OAAO,IAAI,SAAS,KAAK;AAG/C,UAAM,CAAS,IAAI,GAAG,IAAI,IAAI,SAAS;AACvC,QAAMC,gBAAe,MAAM,KAAK,GAAG;AACnC,WAAO,UAAU,GAAG,OAAO,IAAIA,aAAY,KAAKA;AAAA,EAClD;AAGA,MAAI,iBAAiB,MAAM,IAAI;AAC7B,WAAO,UAAU,GAAG,OAAO,IAAI,SAAS,KAAK;AAG/C,MAAI,MAAM,WAAW;AAEnB,WAAO,UAAU,GAAG,OAAO,IAAI,IAAI,IAAI,IAAI,KAAK,GAAG,IAAI,IAAI,IAAI;AAIjE,MAAM,cAAc,MAAM,MAAM,GAAG,EAAE,GAC/B,eAAe,GAAG,IAAI,IAAI,IAAI,IAC9B,eAAe,CAAC,GAAG,aAAa,YAAY,EAAE,KAAK,GAAG;AAC5D,SAAO,UAAU,GAAG,OAAO,IAAI,YAAY,KAAK;AAClD;;;AC1IA,SAAS,WAAAC,WAAS,QAAAC,QAAM,YAAAC,kBAAqB;AAiH7C,SAAS,sBAAsB,KAAa,SAAyB;AACnE,MAAM,MAAMC,WAAS,KAAK,OAAO;AAEjC,SADmB,2BAA2B,GAAG,EAC/B,QAAQ,OAAO,GAAG;AACtC;AAEO,SAAS,kCACd,QACA,gBACsB;AACtB,MAAM,SAAS,OAAO,MAChB,WAAW,OAAO,MAAM,YAAY,CAAC,GACrC,UAAgC,CAAC;AAEvC,WAAW,CAAC,MAAM,KAAK,KAAK,OAAO,QAAQ,QAAQ;AACjD,IAAI,SAAS,kBACb,QAAQ,KAAK;AAAA,MACX,MAAM,OAAO,QAAQ;AAAA,MACrB,aAAa;AAAA,MACb,WAAW,EAAE,SAAS,OAAO,WAAW,GAAG;AAAA,MAC3C,OAAO,OAAO,SAAS,CAAC;AAAA,IAC1B,CAAC;AAGH,SAAO;AACT;AAEA,eAAsB,wBACpB,KACA,gBAC+B;AAC/B,MAAM,SAAS,MAAM,mBAAmB,GAAG;AAC3C,SAAO,kCAAkC,QAAQ,cAAc;AACjE;AAEA,eAAe,2BAA2B
,KAAa,YAAuC;AAC5F,MAAM,eAAe,oBAAoB,2BAA2B,UAAU,CAAC,GACzE,SAASC,OAAK,KAAK,YAAY;AACrC,MAAI,CAAE,MAAM,OAAO,MAAM;AACvB,WAAO,CAAC;AAGV,MAAM,YAAsB,CAAC;AAC7B,MAAI;AACF,mBAAiB,WAAW,UAAU,MAAM,GAAG;AAC7C,UAAM,UAAU,sBAAsB,KAAK,OAAO;AAClD,gBAAU,KAAK,OAAO;AAAA,IACxB;AAAA,EACF,SAAS,OAAO;AACd,WAAO,KAAK,iCAAiC,MAAM,KAAK,KAAK,EAAE;AAAA,EACjE;AACA,SAAO;AACT;AAEA,eAAsB,4BACpB,KACA,SACiC;AACjC,MAAM,eAAe,oBAAI,IAA6B,GAChD,sBAAsB,oBAAI,IAA2B;AAE3D,WAAW,UAAU;AACrB,aAAW,CAAC,QAAQ,MAAM,KAAK,OAAO,QAAQ,OAAO,KAAK,GAAG;AAC3D,UAAM,MAAM,2BAA2B,MAAM,GACvC,QAAuB;AAAA,QAC3B,aAAa,OAAO;AAAA,QACpB;AAAA,QACA,MAAM,IAAI,SAAS,GAAG,IAAI,QAAQ;AAAA,MACpC;AAEA,UAAI,MAAM,SAAS,OAAO;AACxB,QAAK,aAAa,IAAI,GAAG,KACvB,aAAa,IAAI,KAAK,CAAC,CAAC,GAE1B,aAAa,IAAI,GAAG,EAAG,KAAK,KAAK;AAEjC,iBAAW,WAAW,QAAQ;AAC5B,cAAM,SAAS,cAAc,OAAO,GAC9B,QAAQ,MAAM,2BAA2B,KAAK,MAAM;AACxD,mBAAW,YAAY;AACrB,YAAK,oBAAoB,IAAI,QAAQ,KACnC,oBAAoB,IAAI,UAAU,KAAK;AAAA,QAG/C;AAAA,MACF;AACE,iBAAW,WAAW,QAAQ;AAC5B,cAAM,UAAU,cAAc,OAAO,GAC/B,kBAAkB,2BAA2B,OAAO;AACxD,UAAK,oBAAoB,IAAI,eAAe,KAC1C,oBAAoB,IAAI,iBAAiB,KAAK;AAAA,QAElD;AAAA,IAEJ;AAGF,SAAO,EAAE,cAAc,oBAAoB;AAC7C;;;AH7CA,eAAsBC,4BAA2B,KAAa,YAAuC;AACnG,MAAM,eAAe,oBAAoB,2BAA2B,UAAU,CAAC,GACzE,SAASC,OAAK,KAAK,YAAY;AACrC,MAAI,CAAE,MAAM,OAAO,MAAM;AACvB,WAAO,CAAC;AAGV,MAAM,YAAsB,CAAC;AAC7B,MAAI;AACF,mBAAiB,WAAW,UAAU,MAAM,GAAG;AAE7C,UAAM,EAAE,UAAU,MAAM,IAAI,MAAM,OAAO,MAAM,GACzC,MAAM,MAAM,KAAK,OAAO;AAC9B,gBAAU,KAAK,2BAA2B,GAAG,EAAE,QAAQ,OAAO,GAAG,CAAC;AAAA,IACpE;AAAA,EACF,SAAS,OAAO;AACd,WAAO,KAAK,iCAAiC,MAAM,KAAK,KAAK,EAAE;AAAA,EACjE;AACA,SAAO;AACT;AAEA,eAAsB,uBACpB,KACA,OACsB;AACtB,MAAM,QAAQ,oBAAI,IAAY;AAC9B,MAAI,CAAC,MAAO,QAAO;AAEnB,WAAW,CAAC,KAAK,MAAM,KAAK,OAAO,QAAQ,MAAM,KAAK;AACpD,QAAI,SAAS,GAAG;AACd,eAAW,WAAW,QAAQ;AAC5B,YAAM,SAAS,cAAc,OAAO,GAC9B,QAAQ,MAAMD,4BAA2B,KAAK,MAAM;AAC1D,iBAAW,OAAO;AAChB,gBAAM,IAAI,2BAA2B,GAAG,CAAC;AAAA,MAE7C;AAAA;AAEA,eAAW,WAAW,QAAQ;AAC5B,YAAM,QAAQ,cAAc,OAAO;AACnC,cAAM,IAAI,2BAA2B,KAAK,CAAC;AAAA,MAC7C;AAIJ,SAAO;AACT;AAiBA,eAAe,kCACb,eACA,QACA,YAC+B;AAC/B,MAAM,kBAAkB,aACnB,WAA
W,WAAW,GAAG,IAAI,aAAa,IAAI,UAAU,KACzD,QACE,UAAU,kBACZ,gBAAgB,eAAe,gCAAgC,aAAa,KAC5E,QAAQ,aAAa;AAEzB,SAAO,OAAO;AAAA,IACZ;AAAA,IACA;AAAA,MACE,EAAE,OAAO,mCAAoC,OAAO,YAAY;AAAA,MAChE,EAAE,OAAO,iCAAoC,OAAO,OAAY;AAAA,IAClE;AAAA,EACF;AACF;AAMA,eAAe,qBAAqB,SAAiB,YAAsC;AACzF,MAAI;AACF,QAAI,CAAE,MAAM,OAAO,OAAO,EAAI,QAAO;AACrC,QAAM,WAAW,MAAM,aAAa,SAAS,MAAM;AACnD,QAAI,aAAa,WAAY,QAAO;AACpC,QAAM,CAAC,cAAc,OAAO,IAAI,MAAM,QAAQ,IAAI;AAAA,MAChD,kBAAkB,QAAQ;AAAA,MAC1B,kBAAkB,UAAU;AAAA,IAC9B,CAAC;AACD,WAAO,iBAAiB;AAAA,EAC1B,SAAS,OAAO;AACd,kBAAO,KAAK,0CAA0C,OAAO,KAAK,KAAK,EAAE,GAClE;AAAA,EACT;AACF;AAyBO,SAAS,uBACd,SACA,aACA,eACQ;AACR,SAAO,yBAAyB,SAAS,aAAa,aAAa;AACrE;AAMA,eAAe,4BACb,KACA,OACA,YACA,YACA,gBACA,qBACe;AACf,MAAM,gBAAgB,2BAA2B,UAAU,GACrD,gBAAgB,2BAA2B,UAAU,GACrD,SAAS,eAAe,IAAI,MAAM,WAAW;AACnD,MAAI,CAAC,OAAQ;AAEb,MAAI,MAAM,SAAS,QAAQ;AACzB,QAAM,SAAS,OAAO,MAAM,MAAM,GAAG;AACrC,QAAI,CAAC,OAAQ;AACb,QAAM,MAAM,OAAO,UAAU,aAAW;AACtC,UAAM,SAAS,cAAc,OAAO;AACpC,aAAO,2BAA2B,MAAM,MAAM;AAAA,IAChD,CAAC;AACD,QAAI,QAAQ,GAAI;AAChB,QAAM,aAAa,OAAO,GAAG;AAC7B,WAAO,GAAG,IAAI,OAAO,cAAe,WAChC,gBACA,EAAE,GAAG,YAAY,QAAQ,cAAc;AAAA,EAC7C;AAIA,MAAI,qBAAqB;AACvB,wBAAoB,wBAAwB;AAAA,MAC1C,aAAa,MAAM;AAAA,MACnB,UAAU,MAAM;AAAA,MAChB,eAAe;AAAA,MACf,eAAe;AAAA,MACf,eAAe;AAAA,QACb,MAAM,OAAO;AAAA,QACb,SAAS,OAAO,WAAW;AAAA,QAC3B,OAAO,OAAO;AAAA,MAChB;AAAA,IACF,CAAC;AACD;AAAA,EACF;AAGA,MAAM,WAAW,MAAM,mBAAmB,GAAG;AAC7C,WAAS,MAAM,WAAW,SAAS,MAAM,YAAY,CAAC;AACtD,MAAM,QAAQ,SAAS,MAAM,SAAS,OAAO,WAAW,GAElD,UACJ,OAAO,QACP,OAAO,SACN,OAAO,WAAW,UACfE,OAAK,uBAAuB,EAAE,UAAU,OAAO,aAAa,OAAO,UAAU,SAASC,IAAG,IACzF;AACN,MAAI,CAAC,SAAS;AACZ,WAAO,KAAK,sCAAsC,OAAO,WAAW,uBAAuB;AAC3F;AAAA,EACF;AAEA,MAAM,YAAY,kBAAkB,SAAS,GAAG;AAChD,WAAS,MAAM,SAAS,OAAO,WAAW,IAAI;AAAA,IAC5C,GAAG;AAAA,IACH,MAAM;AAAA,IACN,SAAS,OAAO,WAAW,OAAO,WAAW;AAAA,IAC7C,OAAO,YAAY,OAAO,SAAS,CAAC,CAAC;AAAA,EACvC,GAEA,MAAM,oBAAoB,QAAQ;AACpC;AAcA,eAAsB,sBACpB,KACA,aACA,gBACA,UAC2B;AAC3B,MAAM,eAAe,WACjB,kCAAkC,UAAU,WAAW,IACvD,MAAM,wBAAwB,KAAK,WAAW,GAC5C,kBAAkB,MAAM,4BAA4B,KAAK,YAAY,GAG
rE,gBAA2C,iBAC7C;AAAA,IACE,MAAM;AAAA,IACN;AAAA,IACA,WAAW,EAAE,SAAS,GAAG;AAAA,IACzB,OAAO,eAAe;AAAA,EACxB,IACA,MACE,qBAAqB,MAAM,uBAAuB,KAAK,aAAa,GAEpE,iBAAiB,oBAAI,IAAgC;AAC3D,WAAW,OAAO;AAChB,mBAAe,IAAI,IAAI,aAAa,GAAG;AAGzC,SAAO,EAAE,iBAAiB,oBAAoB,eAAe;AAC/D;AAMO,SAAS,qBACd,eACA,kBACkB;AAClB,MAAM,aAAa,2BAA2B,aAAa,GACrD,QAAQ,iBAAiB,gBAAgB,oBAAoB,IAAI,UAAU;AAEjF,SAAI,QACK,EAAE,MAAM,kBAAkB,MAAM,IAMrC,iBAAiB,wBAAwB,IAAI,UAAU,IAClD,EAAE,MAAM,OAAO,IAKnB,iBAAiB,mBAAmB,IAAI,UAAU,IAIhD,EAAE,MAAM,OAAO,IAHb,EAAE,MAAM,iBAAiB;AAIpC;AAqBA,eAAsB,oBACpB,cACA,SACA,WACA,YACA,SACA,aACA,gBACA,QAC6D;AAC7D,MAAM,mBAAmB,QAAQ,qBAAqB,CAAC,GACjD,aAAa,2BAA2B,OAAO,GAG/C,UAAU,iBAAiB,UAAU,KAAK,iBAAiB,OAAO;AACxE,MAAI;AACF,WAAO,EAAE,UAAU,QAA8B;AAInD,MAAI,QAAQ,SAAS,gBAAgB;AAGnC,QAAM,aACJ,iBAAiB,mBAAmB,cAAc,aAC9CC,WAAU,iBAAiB,mBAC7B,eAAe,UAAU,cAAc,SAAS,sBAChD,eAAe,UAAU;AAC7B,WAAO,EAAE,UAAU,YAAY,SAAAA,SAAQ;AAAA,EACzC;AAEA,MAAM,WAAW,QAAQ;AAGzB,MAAI,YAAY,aAAa,OAAO;AAClC,QAAM,WAAW,UACbA;AACJ,WAAI,aAAa,SACfA,WAAU,iBAAiB,mBACvB,YAAY,UAAU,cAAc,SAAS,+BAA+B,QAAQ,OACpF,YAAY,UAAU,gDAAgD,QAAQ,OACzE,aAAa,eAAe,iBAAiB,qBACtDA,WAAU,eAAe,UAAU,gDAAgD,QAAQ,OAEtF,EAAE,UAAU,SAAAA,SAAQ;AAAA,EAC7B;AAGA,MAAI,iBAAiB;AACnB,WAAO,EAAE,UAAU,YAAY;AAIjC,MAAI,aAAa;AACf,QAAM,IAAI,UAAU,cAAc;AAElC,WAAO,EAAE,UADQ,MAAM,kCAAkC,YAAY,GAAG,UAAU,EAChE;AAAA,EACpB;AAIA,SAAO,EAAE,UAAU,QAAQ,SADX,YAAY,UAAU,kDACH;AACrC;AAcA,eAAsB,iBACpB,KACA,eACA,OACA,kBACA,eACA,QACA,oBACA,qBAC2D;AAC3D,MAAM,aAAa,2BAA2B,aAAa,GACrD,sBAAsB,uBAAuB,YAAY,oBAAoB,aAAa;AAEhG,MAAI;AACF,WAAO;AAAA,MACL;AAAA,MACA,SAAS,cAAc,UAAU,cAAc,MAAM,WAAW,YAAO,mBAAmB;AAAA,IAC5F;AAGF,MAAM,YAAYF,OAAK,KAAK,UAAU,GAChC,gBAAgBA,OAAK,KAAK,mBAAmB;AACnD,eAAM,UAAUG,UAAQ,aAAa,CAAC,GACtC,MAAMC,IAAG,OAAO,WAAW,aAAa,GAExC,MAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,iBAAiB;AAAA,IACjB;AAAA,EACF,GAGA,iBAAiB,gBAAgB,oBAAoB,OAAO,UAAU,GACtE,iBAAiB,gBAAgB,oBAAoB;AAAA,IACnD,2BAA2B,mBAAmB;AAAA,IAC9C;AAAA,EACF,GAEO;AAAA,IACL;AAAA,IACA,SAAS,SAAS,UAAU,cAAc,MAAM,WAAW,YAAO,mBAAmB;AAAA,EACvF;AACF;AAoCA,eAAsB,2B
ACpB,KACA,SACA,kBACA,SACA,uBACA,iBAA0B,IAC1B,QACA,WACA,qBACA,oBACmC;AACnC,MAAM,WAAqB,CAAC,GACtB,eAAyB,CAAC,GAC1B,cAAc,aAAa,QAAQ,eAAe,IAClD,WAAW,EAAQ,QAAQ,QAO3B,oBAAoB,MAAM,KAAK,iBAAiB,eAAe,KAAK,CAAC,GACrE,oBAAoB,oBAAI,IAAoB,GAC5C,gBAAgB,oBAAI,IAAY;AAEtC,WAAW,QAAQ,mBAAmB;AACpC,QAAM,OAAO,oBAAoB,MAAM,aAAa;AACpD,sBAAkB,IAAI,MAAM,IAAI,GAChC,cAAc,IAAI,IAAI;AAAA,EACxB;AAKA,MAAI,kBAAkB,QAAQ;AAC9B,MAAI,oBAAoB;AACtB,QAAI;AACF,wBAAkB;AAAA;AAElB,UAAI;AAEF,YAAM,iBADW,MAAM,mBAAmB,GAAG,GACd,MAAM,WAAW,qBAAqB;AACrE,QAAI,eAAe,cACjB,kBAAkB,cAAc;AAAA,MAEpC,QAAQ;AAAA,MAER;AAKJ,MAAM,iBAAiB,OAAO,mBAAoB,WAC9C,kBACA,oBAAoB,uBAAuB,aAAa;AAO5D,MAAI,oBAAoB,IAAO;AAC7B,QAAMC,kBAAgC,CAAC,GACjCC,kBAAkC,CAAC;AACzC,aAAW,UAAU,SAAS;AAC5B,UAAM,iBAAiB,qBAAqB,OAAO,SAAS,gBAAgB;AAC5E,UAAI,eAAe,SAAS,kBAAkB;AAE5C,iBAAS;AAAA,UACP,YAAY,OAAO,OAAO,cAAc,eAAe,MAAO,WAAW;AAAA,QAC3E;AACA;AAAA,MACF;AACA,MAAAD,gBAAe,KAAK,MAAM;AAAA,IAC5B;AACA,WAAO;AAAA,MACL,gBAAAA;AAAA,MACA;AAAA,MACA,sBAAsB;AAAA,MACtB,gBAAAC;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAaA,MAAM,kBAAoC,CAAC,GAEvC,yBAAyB,oBAAoB,MAAQ,OAAO,mBAAoB;AAEpF,WAAW,UAAU,SAAS;AAE5B,QAAI,OAAO,aAAa;AACtB,sBAAgB,KAAK,EAAE,MAAM,OAAO,CAAC;AACrC;AAAA,IACF;AAEA,QAAM,iBAAiB,qBAAqB,OAAO,SAAS,gBAAgB;AAE5E,QAAI,eAAe,SAAS,QAAQ;AAClC,sBAAgB,KAAK,EAAE,MAAM,OAAO,CAAC;AACrC;AAAA,IACF;AAEA,QAAI,eAAe,SAAS,kBAAkB;AAE5C,UAAM,EAAE,UAAAC,WAAU,SAAAL,SAAQ,IAAI,MAAM;AAAA,QAClC;AAAA,QACA,OAAO;AAAA,QACP,eAAe,MAAO;AAAA,QACtB;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAEA,sBAAgB,KAAK,EAAE,MAAM,kBAAkB,OAAO,eAAe,OAAQ,UAAAK,WAAU,SAAAL,SAAQ,CAAC,GAE5FK,cAAa,gBACf,yBAAyB;AAE3B;AAAA,IACF;AAGA,QAAM,YAAYP,OAAK,KAAK,2BAA2B,OAAO,OAAO,CAAC;AAEtE,QAAI,CADe,MAAM,OAAO,SAAS,GACxB;AACf,sBAAgB,KAAK,EAAE,MAAM,OAAO,CAAC;AACrC;AAAA,IACF;AAMA,QAAI,mBAAmB,OAAO;AAC9B,QAAI,qBAAqB,UAAa,OAAO;AAC3C,UAAI;AACF,2BAAmB,MAAM,OAAO,qBAAqB;AAAA,MACvD,QAAQ;AAAA,MAER;AAEF,QAAI,qBAAqB,UAAa,OAAO;AAC3C,UAAI;AACF,2BAAmB,MAAM,aAAa,OAAO,eAAe,MAAM;AAAA,MACpE,QAAQ;AAAA,MAER;AAEF,QAAI,qBAAqB,UAEnB,CADmB,MAAM,qBAAqB,WAAW,gBAAgB,GACxD;AACnB,mBAAa,K
AAK,OAAO,OAAO,GAChC,gBAAgB,KAAK,EAAE,MAAM,OAAO,CAAC;AACrC;AAAA,IACF;AAIF,QAAM,EAAE,UAAU,QAAQ,IAAI,MAAM;AAAA,MAClC;AAAA,MACA,OAAO;AAAA,MACP;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAEA,oBAAgB,KAAK,EAAE,MAAM,kBAAkB,UAAU,QAAQ,CAAC,GAE9D,aAAa,gBACf,yBAAyB;AAAA,EAE7B;AAMA,MAAM,iBAAgC,CAAC,GACjC,iBAAkC,CAAC;AAEzC,WAAS,IAAI,GAAG,IAAI,QAAQ,QAAQ,KAAK;AACvC,QAAM,SAAS,QAAQ,CAAC,GAClB,MAAM,gBAAgB,CAAC;AAG7B,QAAI,IAAI,SAAS,QAAQ;AACvB,UAAI,0BAA0B,CAAC,OAAO,aAAa;AAEjD,YAAM,gBAAgB;AAAA,UACpB,OAAO;AAAA,UACP;AAAA,UACA,OAAO;AAAA,QACT,GACM,gBAAgBA,OAAK,KAAK,aAAa;AAC7C,uBAAe,KAAK,EAAE,GAAG,QAAQ,SAAS,eAAe,SAAS,cAAc,CAAC;AAAA,MACnF;AACE,uBAAe,KAAK,MAAM;AAE5B;AAAA,IACF;AAGA,QAAI,IAAI,SAAS,kBAAkB;AACjC,UAAM,EAAE,UAAU,QAAQ,IAAI;AAG9B,UAFI,WAAS,SAAS,KAAK,OAAO,GAE9B,aAAa,OAAQ;AAEzB,UAAI,aAAa,aAAa;AAC5B,YAAI;AAEF,cAAM,YAAY,kBAAkB,IAAI,IAAI,MAAM,WAAW,KACxD,oBAAoB,IAAI,MAAM,aAAa,aAAa,GACvD,EAAE,qBAAqB,SAAS,OAAO,IAAI,MAAM;AAAA,YACrD;AAAA,YACA,OAAO;AAAA,YACP,IAAI;AAAA,YACJ;AAAA,YACA,OAAO;AAAA,YACP;AAAA,YACA;AAAA,YACA;AAAA,UACF;AACA,mBAAS,KAAK,MAAM,GACpB,eAAe,KAAK,EAAE,MAAM,2BAA2B,OAAO,OAAO,GAAG,IAAI,oBAAoB,CAAC;AAGjG,cAAM,gBAAgB;AAAA,YACpB,OAAO;AAAA,YACP;AAAA,YACA,OAAO;AAAA,UACT,GACM,gBAAgBA,OAAK,KAAK,aAAa;AAC7C,yBAAe,KAAK,EAAE,GAAG,QAAQ,SAAS,eAAe,SAAS,cAAc,CAAC;AAAA,QACnF,SAAS,OAAO;AACd,mBAAS,KAAK,uBAAuB,OAAO,OAAO,KAAK,KAAK,EAAE;AAAA,QAEjE;AACA;AAAA,MACF;AAGA,MAAK,YACH,iBAAiB,gBAAgB,oBAAoB;AAAA,QACnD,2BAA2B,OAAO,OAAO;AAAA,MAC3C,GAEE,YACF,SAAS,KAAK,mBAAmB,OAAO,OAAO,wBAAwB,IAAI,MAAM,WAAW,IAAI,GAElG,eAAe,KAAK,MAAM;AAC1B;AAAA,IACF;AAGA,QAAI,IAAI,SAAS,kBAAkB;AACjC,UAAM,EAAE,UAAU,QAAQ,IAAI;AAG9B,UAFI,WAAS,SAAS,KAAK,OAAO,GAE9B,aAAa,OAAQ;AAEzB,UAAI,aAAa,aAAa;AAE5B,YAAM,gBAAgB;AAAA,UACpB,OAAO;AAAA,UACP;AAAA,UACA,OAAO;AAAA,QACT,GACM,gBAAgBA,OAAK,KAAK,aAAa;AAC7C,QAAI,WACF,SAAS;AAAA,UACP,iBAAiB,OAAO,OAAO,OAAO,aAAa;AAAA,QAErD,IAEA,SAAS;AAAA,UACP,gBAAgB,aAAa,mCACK,OAAO,OAAO;AAAA,QAClD,GAEF,eAAe,KAAK,EAAE,GAAG,QAAQ,SAAS,eAAe,SAAS,cAAc,CAAC;AACjF;AAAA,MACF;AAGA,MAAI,YACF,SAAS,KAAK,uCAAuC,OAAO,
OAAO,qBAAqB,GAE1F,eAAe,KAAK,MAAM;AAC1B;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA,sBAAsB;AAAA,IACtB,cAAc,yBAAyB,iBAAiB;AAAA,IACxD;AAAA,IACA;AAAA,EACF;AACF;;;ADv3BO,IAAM,2BAAN,cAAuC,aAAa;AAAA,EAApD;AAAA;AACL,SAAS,OAAO;AAAA;AAAA,EAEhB,UAAU,QAAuB,UAA6B;AAE5D,WAAO;AAAA,EACT;AAAA,EAEA,MAAM,QACJ,SACA,SACA,iBAA0B,IACE;AAC5B,QAAM,EAAE,aAAa,aAAa,eAAe,UAAU,OAAO,IAAI;AAItE,QAFA,KAAK,qBAAqB,OAAO,GAE7B,CAAC,kBAAkB,UAAU,aAAa;AAC5C,aAAO,KAAK,kBAAkB;AAIhC,QAAM,QAAQ,KAAK,mBAAmB,UAAU,aAAa;AAC7D,QAAI,MAAM,WAAW;AACnB,aAAO,KAAK,kBAAkB;AAIhC,QAAM,cAAqC;AAAA,MACzC;AAAA,MAAa;AAAA,MAAa;AAAA,MAAe;AAAA,MACzC,gBAAgB,QAAQ;AAAA,MACxB,UAAU,QAAQ;AAAA,MAClB,QAAQ,QAAQ;AAAA,MAChB,mBAAmB,QAAQ;AAAA,MAC3B,gBAAgB,QAAQ;AAAA,IAC1B,GACM,cAAc,uBAAuB,aAAa,SAAS,GAG3D,kBAAkB,MAAM;AAAA,MAC5B;AAAA,MAAO;AAAA,MAAa;AAAA,IACtB,GAKM,mBAAmB,WAAW,CAAC,GAC/B,mBAA6B,CAAC,GAChC,gBAAgB,IAChB,yBAA8D,CAAC;AAEnE,QAAI;AAEF,UAAM,WAAW,KAAK,qBAAqB,iBAAiB,WAAW,GACjE,UAAU,SAAS;AAEzB,UAAI,QAAQ,SAAS,GAAG;AAEtB,YAAI;AACJ,YAAI,QAAQ;AACV,6BAAmB,QAAQ;AAAA,aACtB;AAEL,cAAM,WAAW,QAAQ,qBACnB,iBAAiB,WACnB,KAAK,sBAAsB,UAAU,WAAW,IAChD,MAAM,KAAK,wBAAwB,eAAe,WAAW;AAGjE,6BAAmB,MAAM;AAAA,YACvB;AAAA,YACA;AAAA,YACA;AAAA,YACA;AAAA,UACF;AAAA,QACF;AAGA,YAAM,qBAAqB,QAAQ,qBAC/B,MAAM,WAAW,WAAW,GAAG,WAG7B,EAAE,gBAAgB,UAAU,sBAAsB,cAAc,gBAAgB,aAAa,IAAI,MAAM;AAAA,UAC3G;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA,QAAQ;AAAA,UACR;AAAA,UACA,QAAQ;AAAA,UACR;AAAA,QACF;AACA,yBAAiB,KAAK,GAAG,QAAQ,GACjC,gBAAgB,sBAChB,yBAAyB;AAIzB,YAAM,eAAe,oBAAI,IAAyB;AAClD,iBAAW,MAAM;AACf,uBAAa,IAAI,GAAG,GAAG,aAAa,KAAK,GAAG,aAAa,IAAI,EAAE;AAGjE,YAAM,kBAAkB,oBAAI,IAAoB,GAC1C,uBAAuB,oBAAI,IAAY;AAC7C,iBAAW,KAAK,SAAS;AACvB,cAAM,MAAM,GAAG,EAAE,aAAa,KAAK,EAAE,aAAa,IAC5C,QAAQ,aAAa,IAAI,GAAG;AAClC,cAAI,CAAC,MAAO;AAEZ,cAAM,WAAW,2BAA2B,EAAE,OAAO;AAGrD,cAFA,qBAAqB,IAAI,QAAQ,GAE7B,sBAAsB;AACxB,gBAAM,UAAU,2BAA2B,MAAM,OAAO;AACxD,YAAI,aAAa,WACf,gBAAgB,IAAI,UAAU,OAAO;AAAA,UAEzC;AAAA,QACF;AACA,YAAM,gBAAgB,KAAK,mBAAmB,UAAU,oBAAoB,GAGtE,kBAA+B,gBAAgB,OAAO,IACxD,EAAE,GAAG,a
AAa,gBAAgB,IAClC,aAGEQ,mBAAkB,MAAM,mBAAmB,eAAe,eAAe,GACzEC,UAAS,uBAAuBD,kBAAiB,aAAa,UAAU,MAAM;AAGpF,YAAIA,iBAAgB,UAAU;AAC5B,mBAAW,KAAKA,iBAAgB;AAC9B,mBAAO,KAAK,CAAC;AAKjB,iBAAW,OAAO;AAChB,iBAAO,KAAK,GAAG,GACfC,QAAO,UAAU,KAAK;AAAA,YACpB,YAAY;AAAA,YACZ,UAAU,CAAC,EAAE,aAAa,UAAU,GAAG,QAAQ,GAAK,CAAC;AAAA,YACrD,SAAS;AAAA,UACX,CAAC;AAIH,eAAAA,QAAO,aAAa,eACpBA,QAAO,gBAAgB,cACvBA,QAAO,iBAAiB,wBACxBA,QAAO,eAAe,cAEtB,KAAK,WAAWA,SAAQ,OAAO,GACxBA;AAAA,MACT;AAAA,IACF,SAAS,OAAO;AAEd,aAAO,KAAK,uCAAuC,WAAW,KAAK,KAAK,uCAAuC;AAAA,IACjH;AAGA,QAAM,kBAAkB,MAAM,mBAAmB,iBAAiB,WAAW;AAG7E,QAAI,gBAAgB,UAAU;AAC5B,eAAW,KAAK,gBAAgB;AAC9B,eAAO,KAAK,CAAC;AAKjB,QAAM,SAAS,uBAAuB,iBAAiB,aAAa,UAAU,MAAM;AAEpF,gBAAK,WAAW,QAAQ,OAAO,GAExB;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAaQ,qBACN,aACA,aACiB;AACjB,QAAM,UAAyB,CAAC,GAC1B,YAAsD,CAAC;AAE7D,aAAW,CAAC,MAAM,OAAO,KAAK,aAAa;AACzC,UAAM,eAAe,oBAAoB,KAAK,IAAI,GAG5C,cAAc,GAClB,KAAK,SAAS,KAAK,UAAU;AAG/B,eAAW,aAAa;AACtB,YAAI;AACF,cAAM,YAAYC,OAAK,YAAY,aAAa,SAAS,GACnD,eAAe,oBAAoB,WAAW,YAAY,GAE1D,gBAA6B;AAAA,YACjC,GAAG;AAAA,YACH,WAAW;AAAA,cACT,GAAG,YAAY;AAAA,cACf,YAAY;AAAA,cACZ,WAAWC,UAAQ,SAAS;AAAA,cAC5B,YAAYC,WAAS,SAAS;AAAA,cAC9B,GAAI,eAAe,EAAE,aAAa,IAAI,CAAC;AAAA,YACzC;AAAA,UACF,GAEM,eAAe;AAAA,YACnB,KAAK;AAAA,YACL,CAAC,OAAO,wBAAwB,IAAI,aAAa;AAAA,UACnD,KAAK,IACC,oBAAoB,eAAe,cAAc,eAAe,YAAY,GAC5E,YAAY;AAAA,YAChB;AAAA,YACA;AAAA,YACA;AAAA,YACA;AAAA,UACF,GAEM,eAAeC,WAAS,YAAY,eAAe,SAAS,GAC5D,YAAY,2BAA2B,YAAY,GAInD,oBAAoB,CAAC,aAMvB;AACJ,cACE,qBACG,CAAC,kBAAkB,MAAM,WAAW,WAAW,WAAW,KAC1D,oBAAoB,IAAIC,SAAQ,SAAS,EAAE,YAAY,CAAC,KACxD,YAAY,YACZ,YAAY,cAAc,WAC7B;AACA,gBAAM,oBAAoB,WACpB,mBAAmB,YAAY,UAC/B,wBAAwB,YAAY;AAC1C,mCAAuB,YAAY;AACjC,kBAAM,MAAM,MAAM,aAAa,mBAAmB,MAAM;AACxD,qBAAO,4BAA4B,KAAK,kBAAkB,qBAAqB;AAAA,YACjF;AAAA,UACF;AAEA,kBAAQ,KAAK;AAAA,YACX,SAAS;AAAA,YACT,SAAS;AAAA,YACT,eAAe;AAAA,YACf;AAAA,YACA,eAAe,oBAAoB,YAAY;AAAA,YAC/C;AAAA,UACF,CAAC,GACD,UAAU,KAAK,EAAE,MAAM,UAAU,CAAC;AAAA,QACpC,QAAQ;AAAA,QAGR;AAAA,IAEJ;AAEA,WAAO,EAAE,SAAS,UAAU;AAAA,EAC9
B;AAAA;AAAA;AAAA;AAAA;AAAA,EAMQ,mBACN,UACA,sBACqB;AACrB,QAAM,SAAS,oBAAI,IAAoB;AACvC,aAAS,IAAI,GAAG,IAAI,SAAS,QAAQ,QAAQ,KAAK;AAChD,UAAM,QAAQ,SAAS,QAAQ,CAAC,GAC1B,EAAE,MAAM,UAAU,IAAI,SAAS,UAAU,CAAC;AAChD,OAAI,MAAM,eAAe,qBAAqB,IAAI,2BAA2B,MAAM,OAAO,CAAC,OACpF,OAAO,IAAI,IAAI,KAAG,OAAO,IAAI,MAAM,CAAC,CAAC,GAC1C,OAAO,IAAI,IAAI,EAAG,KAAK,SAAS;AAAA,IAEpC;AACA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,sBACN,UACA,aACyC;AACzC,QAAM,QAAQ,SAAS,MAAM,WAAW,WAAW;AACnD,WAAK,QACE,EAAE,OAAO,MAAM,SAAS,CAAC,EAAE,IADf;AAAA,EAErB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,MAAc,wBACZ,KACA,aACkD;AAClD,QAAI;AAEF,UAAM,SADW,MAAM,mBAAmB,GAAG,GACtB,MAAM,WAAW,WAAW;AACnD,aAAK,QACE,EAAE,OAAO,MAAM,SAAS,CAAC,EAAE,IADf;AAAA,IAErB,QAAQ;AACN,aAAO;AAAA,IACT;AAAA,EACF;AACF;;;AKnYO,SAAS,iCACd,gBACoB;AACpB,MAAI,CAAC;AACH,WAAO;AAGT,MAAM,aAAa,eAAe,QAAQ,OAAO,GAAG,EAAE,QAAQ,UAAU,EAAE,GACpE,qBAAqB,WAAW,SAAS,KAAK,GAE9C,YADc,qBAAqB,WAAW,MAAM,GAAG,EAAE,IAAI,YACtC,MAAM,GAAG,EAAE,OAAO,OAAO;AAEtD,WAAW,QAAQ,oBAAoB,GAAG;AACxC,QAAI,CAAC,KAAK;AACR;AAGF,QAAM,cAAc,SAAS,YAAY,KAAK,OAAO;AACrD,QAAI,gBAAgB;AAClB;AAGF,QAAM,eAAe,SAAS,MAAM,WAAW,EAAE,KAAK,GAAG;AACzD,WAAO,qBAAqB,GAAG,YAAY,QAAQ;AAAA,EACrD;AAEA,SAAO;AACT;;;A/BeO,IAAM,4BAAN,cAAwC,aAAa;AAAA,EAArD;AAAA;AACL,SAAS,OAAO;AAAA;AAAA,EAEhB,UAAU,QAAuB,UAA6B;AAC5D,WAAO,gBAAgB,QAAQ,QAAQ;AAAA,EACzC;AAAA,EAEA,MAAM,QACJ,SACA,SACA,iBAA0B,IACE;AAC5B,QAAM,EAAE,aAAa,aAAa,eAAe,UAAU,OAAO,IAAI;AAEtE,SAAK,qBAAqB,OAAO,GAEjC,OAAO,KAAK,cAAc,WAAW,SAAS,QAAQ,eAAe,YAAY,SAAS,OAAO,QAAQ,SAAS;AAElH,QAAI;AAEF,UAAM,eAAe,MAAM,KAAK,iBAAiB,WAAW,GAGtD,MAAe;AAAA,QACnB,UAAU;AAAA,UACR,MAAM;AAAA,UACN,SAAS,QAAQ;AAAA,QACnB;AAAA,QACA,OAAO;AAAA,QACP,SAAS,QAAQ,iBAAiB,MAAM,KAAK,aAAa,WAAW;AAAA,MACvE,GAKM,oBAAoB,wBAAwB,IAAI,OAAQ,GAGxD,mBAAmB,MADP,wBAAwB,aAAa,EACd;AAAA,QACvC;AAAA,QACA;AAAA;AAAA,QACA;AAAA;AAAA,QACA,EAAE,OAAO;AAAA,MACX;AAEA,aAAI,CAAC,iBAAiB,WAAW,CAAC,iBAAiB,oBACjD,OAAO,MAAM,6BAA6B;AAAA,QACxC,SAAS;AAAA,QACT,QAAQ,iBAAiB;AAAA,MAC3B,CAAC,GAEM,KAAK;AAAA,QACV;AAAA,QACA,IAAI,MAAM,mBAAmB;AAAA,QAC7B;AAAA,MACF,MAGF,OAAO;AAAA,QAC
L,4CAA4C,iBAAiB,OAAO,MAAM,0BAC1D,QAAQ;AAAA,MAC1B,GAGO,MAAM,KAAK;AAAA,QAChB,iBAAiB;AAAA,QACjB,iBAAiB,kBAAkB;AAAA,QACnC;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAAA,IAEF,SAAS,OAAO;AACd,oBAAO,MAAM,kCAAkC,EAAE,aAAa,MAAM,CAAC,GAC9D,KAAK;AAAA,QACV;AAAA,QACA;AAAA,QACA,sCAAuC,MAAgB,OAAO;AAAA,MAChE;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,iBAAiB,aAAwE;AACrG,QAAM,eAAyD,CAAC;AAEhE,mBAAiB,cAAcC,WAAU,WAAW,GAAG;AACrD,UAAM,eAAeC,WAAS,aAAa,UAAU;AAErD,UAAI,aAAa,WAAW,eAAe,KAAK,iBAAiB;AAC/D;AAGF,UAAM,UAAU,MAAM,aAAa,UAAU;AAC7C,mBAAa,KAAK,EAAE,MAAM,cAAc,SAAS,UAAU,OAAO,CAAQ;AAAA,IAC5E;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,aAAa,aAA6C;AACtE,QAAM,QAAkD,CAAC;AAEzD,mBAAiB,YAAYD,WAAU,WAAW,GAAG;AACnD,UAAM,eAAeC,WAAS,aAAa,QAAQ;AAEnD,MAAI,aAAa,WAAW,OAAO,KAAK,iBAAiB,UAIzD,MAAM,KAAK,EAAE,MAAM,cAAc,SAAS,GAAG,CAAC;AAAA,IAChD;AAEA,WAAO,oBAAoB,KAAK;AAAA,EAClC;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,wBACZ,kBACA,mBACA,SACA,SACA,iBAA0B,IACE;AAC5B,QAAI,kBAAiC;AAErC,QAAI;AAEF,wBAAkB,MAAM,2BAA2B,GAGnD,MAAM,sBAAsB,iBAAiB,OAAO,eAAe,GAGnE,MAAM,uBAAuB,mBAAmB,eAAe;AAG/D,UAAM,eAAe,IAAI,yBAAyB,GAE5C,mBAAuC;AAAA,QAC3C,GAAG;AAAA,QACH,aAAa;AAAA;AAAA,QAEb,eAAe,iBAAiB;AAAA;AAAA,QAEhC;AAAA,QACA,gBAAgB,iCAAiC,QAAQ,cAAc;AAAA,MACzE,GAEM,gBAAgB,MAAM,aAAa,QAAQ,kBAAkB,SAAS,cAAc;AAG1F,mBAAM,qBAAqB,eAAe,GAEnC;AAAA,IAET,SAAS,OAAO;AACd,mBAAM,qBAAqB,eAAe,GAE1C,OAAO,MAAM,uCAAuC;AAAA,QAClD,aAAa,QAAQ;AAAA,QACrB;AAAA,MACF,CAAC,GAEM,KAAK;AAAA,QACV;AAAA,QACA;AAAA,QACA,wCAAyC,MAAgB,OAAO;AAAA,MAClE;AAAA,IACF;AAAA,EACF;AACF;;;AgCvMO,SAAS,sBACd,SACA,SACsB;AACtB,MAAM,SAAS,QAAQ,eACjB,WAAW,QAAQ;AAGzB,SAAK,SAKD,gBAAgB,QAAQ,QAAQ,IAC3B,IAAI,0BAA0B,IAIhC,IAAI,yBAAyB,IAT3B,IAAI,yBAAyB;AAUxC;;;ACnCA,SAAS,YAAAC,kBAAgB;AAiBzB,eAAsB,oBACpB,aACmD;AACnD,MAAM,QAAkD,CAAC;AAEzD,MAAI;AACF,mBAAiB,YAAYC,WAAU,WAAW,GAAG;AACnD,UAAM,eAAeC,WAAS,aAAa,QAAQ;AAGnD,MAAI,aAAa,WAAW,OAAO,KAAK,iBAAiB,UAIzD,MAAM,KAAK;AAAA,QACT,MAAM;AAAA,QACN,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAAA,EACF,SAAS,OAAO;AACd,WAAO,MAAM,yDAAyD;AAAA,MACpE;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH;AAEA,SAAO;AACT;AA
UA,eAAsB,0BACpB,aACwB;AACxB,MAAM,QAAQ,MAAM,oBAAoB,WAAW;AACnD,SAAO,oBAAoB,KAAK;AAClC;AAWA,eAAsB,qCACpB,aACuE;AACvE,MAAM,QAAQ,MAAM,oBAAoB,WAAW;AACnD,SAAO,+BAA+B,KAAK;AAC7C;;;AChCA,eAAsB,wBACpB,gBACA,SACA,iBAA0B,IACE;AAC5B,MAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,IAAI,gBAEE,SAA4B;AAAA,IAChC,SAAS;AAAA,IACT,gBAAgB;AAAA,IAChB,cAAc;AAAA,IACd,WAAW,CAAC;AAAA,IACZ,QAAQ,CAAC;AAAA,IACT,aAAa,CAAC;AAAA,IACd,aAAa,CAAC;AAAA,EAChB;AAEA,MAAI;AAEF,QAAI,CAAC,kBAAkB,UAAU,aAAa;AAC5C,oBAAO,MAAM,YAAY,QAAQ,uDAAuD,GACjF;AAIT,QAAM,gBAAgB,eAAe,iBACnC,MAAM,0BAA0B,WAAW;AAE7C,WAAO,MAAM,kBAAkB;AAAA,MAC7B,SAAS;AAAA,MACT,MAAM,cAAc;AAAA,MACpB,UAAU,cAAc;AAAA,MACxB,YAAY,cAAc;AAAA,MAC1B,gBAAgB;AAAA,MAChB,QAAQ,eAAe,gBAAgB,aAAa;AAAA,IACtD,CAAC;AAGD,QAAM,kBAAsC;AAAA,MAC1C,GAAG;AAAA,MACH;AAAA,IACF,GAGM,iBAAiB,MADN,sBAAsB,iBAAiB,OAAO,EACzB,QAAQ,iBAAiB,SAAS,cAAc;AAGtF,iCAAsB,gBAAgB,aAAa,UAAU,UAAU,EAAK,GAErE;AAAA,EAET,SAAS,OAAO;AACd,kBAAO,UAAU,IACjB,OAAO,MAAM,6BAA6B,WAAW,gBAAiB,MAAgB,OAAO,EAAE,GAC/F,OAAO,OAAO,KAAK;AAAA,MACjB,MAAM,EAAE,MAAM,aAAa,IAAI,cAAc;AAAA,MAC7C,YAAY;AAAA,MACZ;AAAA,MACA,SAAS,wBAAyB,MAAgB,OAAO;AAAA,IAC3D,CAAC,GACM;AAAA,EACT;AACF;;;AC7GA,SAAS,QAAAC,cAAY;;;ACJrB,SAAS,QAAAC,cAAY;AAKrB,OAAOC,YAAU;AACjB,YAAYC,WAAU;AAOtB,SAAS,iBAAiB,UAA8B;AAEtD,UADY,SAAS,YAAY,EAAE,MAAM,GAAG,EAAE,IAAI,GACrC;AAAA,IACX,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AAAA,IACL,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT;AACE,aAAO;AAAA,EACX;AACF;AAKA,SAAS,aAAa,SAAiB,QAAyB;AAC9D,MAAI;AACF,YAAQ,QAAQ;AAAA,MACd,KAAK;AAAA,MACL,KAAK;AAEH,YAAM,UAAU,WAAW,UACvB,QAAQ,QAAQ,aAAa,EAAE,EAAE,QAAQ,qBAAqB,EAAE,IAChE;AACJ,eAAO,KAAK,MAAM,OAAO;AAAA,MAE3B,KAAK;AAAA,MACL,KAAK;AACH,eAAOD,OAAK,KAAK,OAAO;AAAA,MAE1B,KAAK;AACH,eAAY,YAAM,OAAO;AAAA,MAE3B;AACE,eAAO;AAAA,IACX;AAAA,EACF,SAAS,OAAO;AACd,UAAM,IAAI,MAAM,mBAAmB,MAAM,UAAU,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC,EAAE;AAAA,EAC7G;AACF;AAKA,SAAS,iBAAiB,MAAW,QAA4B;AAC/D,MAAI;AACF,YAAQ,QAAQ;AAAA,MACd,KAAK;AAAA,MACL,KAAK;AACH,eAAO,KAAK,UAAU,MA
AM,MAAM,CAAC;AAAA,MAErC,KAAK;AAAA,MACL,KAAK;AACH,eAAOA,OAAK,KAAK,MAAM,EAAE,QAAQ,GAAG,WAAW,GAAG,WAAW,GAAG,CAAC;AAAA,MAEnE,KAAK;AACH,eAAY,gBAAU,IAAI;AAAA,MAE5B;AACE,eAAO,OAAO,QAAS,WAAW,OAAO,KAAK,UAAU,MAAM,MAAM,CAAC;AAAA,IACzE;AAAA,EACF,SAAS,OAAO;AACd,UAAM,IAAI,MAAM,uBAAuB,MAAM,UAAU,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC,EAAE;AAAA,EACjH;AACF;AAWA,eAAsB,yBACpB,WACA,YACA,cACiD;AACjD,MAAM,UAAUE,OAAK,WAAW,UAAU;AAE1C,MAAI,CAAE,MAAM,OAAO,OAAO;AACxB,WAAO,EAAE,SAAS,IAAO,SAAS,GAAM;AAI1C,MAAM,UAAU,MAAM,aAAa,OAAO,GACpC,SAAS,iBAAiB,UAAU,GACtC;AAEJ,MAAI;AACF,WAAO,aAAa,SAAS,MAAM;AAAA,EACrC,SAAS,OAAO;AACd,kBAAO,KAAK,mBAAmB,UAAU,qBAAqB,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC,EAAE,GAC/G,EAAE,SAAS,IAAO,SAAS,GAAM;AAAA,EAC1C;AAEA,MAAM,mBAAmB,CAAC,KAAU,YAA6B;AAC/D,QAAI,CAAC,OAAO,OAAO,OAAQ,SAAU,QAAO;AAC5C,QAAM,QAAQ,QAAQ,MAAM,GAAG,EAAE,OAAO,OAAO,GAC3C,UAAe;AACnB,aAAW,QAAQ,OAAO;AACxB,UAAI,CAAC,WAAW,OAAO,WAAY,YAAY,EAAE,QAAQ,SAAU,QAAO;AAC1E,gBAAU,QAAQ,IAAI;AAAA,IACxB;AACA,WAAO;AAAA,EACT,GAEM,iBAAiB,aAAa,OAAO,OAAK,OAAO,KAAM,YAAY,iBAAiB,MAAM,CAAC,CAAC,EAAE;AAEpG,WAAW,OAAO;AAChB,oBAAgB,MAAM,GAAG;AAI3B,MAAI,mBAAmB,IAAI;AACzB,iBAAM,OAAO,OAAO,GACpB,OAAO,KAAK,uBAAuB,UAAU,EAAE,GACxC,EAAE,SAAS,IAAM,SAAS,GAAM;AAIzC,MAAM,aAAa,iBAAiB,MAAM,MAAM;AAChD,eAAM,cAAc,SAAS,UAAU,GACvC,OAAO,KAAK,WAAW,UAAU,aAAa,aAAa,MAAM,OAAO,GACjE,EAAE,SAAS,IAAO,SAAS,GAAK;AACzC;AAWA,eAAsB,kBACpB,WACA,SACA,aACmD;AACnD,MAAM,UAAoB,CAAC,GACrB,UAAoB,CAAC;AAE3B,MAAI,OAAO,WAAY,UAAU;AAE/B,QAAM,UAAUA,OAAK,WAAW,OAAO;AACvC,IAAI,MAAM,OAAO,OAAO,MACtB,MAAM,OAAO,OAAO,GACpB,QAAQ,KAAK,OAAO,GACpB,OAAO,MAAM,iBAAiB,OAAO,EAAE;AAAA,EAE3C,OAAO;AAEL,QAAM,aAAa,QAAQ;AAE3B,QAAI,QAAQ,UAAU;AAGpB,aAAO,MAAM,+DAA+D,UAAU,EAAE;AAAA,aAC/E,QAAQ,QAAQ,QAAQ,KAAK,SAAS,GAAG;AAElD,UAAM,SAAS,MAAM,yBAAyB,WAAW,YAAY,QAAQ,IAAI;AAEjF,MAAI,OAAO,UACT,QAAQ,KAAK,UAAU,IACd,OAAO,WAChB,QAAQ,KAAK,UAAU,GAGzB,OAAO;AAAA,QACL,WAAW,QAAQ,KAAK,MAAM,cAAc,UAAU,KAAK,QAAQ,KAAK,KAAK,IAAI,CAAC;AAAA,MACpF;AAAA,IACF,WAAW,QAAQ,UAAU,UAAU,QAAQ,UAAU;AAEvD,aAAO;AAAA,QACL,2BAA2B,UAAU,QAAQ,WAAW;AAAA,MAE1D;AAAA,SAEK
;AAEL,UAAM,UAAUA,OAAK,WAAW,UAAU;AAC1C,MAAI,MAAM,OAAO,OAAO,MACtB,MAAM,OAAO,OAAO,GACpB,QAAQ,KAAK,UAAU,GACvB,OAAO,MAAM,iBAAiB,UAAU,EAAE;AAAA,IAE9C;AAAA,EACF;AAEA,SAAO,EAAE,SAAS,QAAQ;AAC5B;;;AC3NA,OAAOC,WAAU;AACjB,SAAS,WAAAC,gBAAe;AAuBxB,eAAsB,oBACpB,SACA,cACA,gBAA6B,oBAAI,IAAI,GACtB;AACf,MAAM,gBAAgB,oBAAI,IAAY;AAGtC,WAAW,eAAe,cAAc;AACtC,QAAI,UAAUC,MAAK,QAAQ,WAAW;AAGtC,WAAO,QAAQ,WAAW,OAAO,KAAK,YAAY,WAE5C,eAAc,IAAI,OAAO;AAI7B,oBAAc,IAAI,OAAO,GACzB,UAAUA,MAAK,QAAQ,OAAO;AAAA,EAElC;AAGA,MAAM,SAAS,MAAM,KAAK,aAAa,EAAE,KAAK,CAAC,GAAG,MAAM,EAAE,SAAS,EAAE,MAAM;AAG3E,WAAW,OAAO;AAChB,QAAI;AAIF,OAHgB,MAAMC,SAAQ,GAAG,GAGrB,WAAW,KAAK,CAAC,cAAc,IAAI,GAAG,MAChD,MAAM,OAAO,GAAG,GAChB,OAAO,MAAM,4BAA4BD,MAAK,SAAS,SAAS,GAAG,CAAC,EAAE;AAAA,IAE1E,SAAS,OAAO;AAEd,aAAO,MAAM,+BAA+B,GAAG,KAAK,KAAK,EAAE;AAAA,IAC7D;AAEJ;;;ACzDA,OAAOE,WAAU;AAyBV,SAAS,4BAA4B,SAAiB,WAAkC;AAE7F,MAAM,aAAa,QAAQ,QAAQ,OAAO,GAAG,GAMvC,eAAe,CADA,aAAa,KAAK,UAAU,KACX,gBAAgB,KAAK,UAAU,GAC/DC,eAAc,WAAW,SAAS,GAAG,KAAK,CAAC,cAE7C;AAEJ,MAAIA;AAEF,cAAU,WAAW,QAAQ,OAAO,EAAE;AAAA,OACjC;AAEL,QAAM,YAAY,WAAW,YAAY,GAAG;AAC5C,cAAU,aAAa,IAAI,WAAW,UAAU,GAAG,SAAS,IAAI;AAAA,EAClE;AAGA,MAAM,UAAU,UAAUC,MAAK,KAAK,WAAW,OAAO,IAAI;AAI1D,SAAO,YAAY,YAAY,OAAO;AACxC;AAsBO,SAAS,6BAA6B,WAAgC;AAC3E,MAAM,YAAY,oBAAI,IAAY,GAC5B,YAAY,gBAAgB,QAAW,SAAS;AAEtD,WAAW,YAAY,WAAW;AAChC,QAAM,aAAa,sBAAsB,UAAU,SAAS;AAG5D,QAAI,WAAW,aAAa,WAAW,UAAU,SAAS;AACxD,eAAW,WAAW,WAAW,WAAW;AAC1C,YAAM,gBAAgB,4BAA4B,SAAS,SAAS;AACpE,QAAI,iBACF,UAAU,IAAI,aAAa;AAAA,MAE/B;AAAA,aACS,WAAW,SAAS;AAE7B,UAAM,WAAWA,MAAK,KAAK,WAAW,WAAW,OAAO;AACxD,MAAI,aAAa,aACf,UAAU,IAAI,QAAQ;AAAA,IAE1B;AAAA,EACF;AAEA,SAAO;AACT;;;AH7DA,eAAsB,iBAAiB,SASP;AAC9B,MAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,IAAI,SAEE,UAAoB,CAAC,GACrB,UAAoB,CAAC;AAE3B,MAAI;AACF,WAAO,EAAE,SAAS,QAAQ;AAM5B,MAAI,sBAAsB;AAE1B,MAAI,gBAAgB;AAElB,QAAM,oBAAoB,eAAe,QAAQ,OAAO,GAAG,GACrD,YAAY,kBAAkB,OAAO,QAAQ,GAC7C,cAAc,YAAY,IAC5B,kBAAkB,MAAM,GAAG,SAAS,IACpC,cAAc,KACZ,oBACA;AAEN,QAAI,aAAa;AACf,4BAAsB,C
AAC;AACvB,eAAW,CAAC,KAAK,MAAM,KAAK,OAAO,QAAQ,aAAa,GAAG;AACzD,YAAM,gBAAgB,IAAI,QAAQ,OAAO,GAAG;AAC5C,SAAI,cAAc,WAAW,WAAW,KAAK,YAAY,WAAW,aAAa,OAC/E,oBAAoB,GAAG,IAAI;AAAA,MAE/B;AAAA,IACF;AAAA,EACF;AAKA,MAAM,gBAAgB,yBAAyB,WAAW,GAAG,GACvD,oBAAoB,oBAAI,IAAgD;AAE9E,WAAW,CAAC,WAAW,QAAQ,KAAK,OAAO,QAAQ,mBAAmB;AAEpE,QAAI,eAAc,IAAI,SAAS;AAE/B,UAAI,SAAS,SAAS;AAEpB,iBAAW,WAAW,UAAU;AAC9B,cAAM,SAAS,cAAc,OAAO,GAC9B,SAASC,OAAK,KAAK,MAAM;AAC/B,cAAI;AACF,2BAAiB,WAAW,UAAU,MAAM,GAAG;AAC7C,kBAAM,MAAM,wBAAwB,SAAS,GAAG;AAEhD,gCAAkB,IAAI,KAAK,OAAO;AAAA,YACpC;AAAA,UACF,QAAQ;AAAA,UAER;AAAA,QACF;AAAA;AAGA,iBAAW,WAAW,UAAU;AAC9B,cAAM,SAAS,2BAA2B,cAAc,OAAO,CAAC;AAChE,4BAAkB,IAAI,QAAQ,OAAO;AAAA,QACvC;AAOJ,MAAM,eAAe,oBAAI,IAAY;AAErC,WAAW,CAAC,WAAW,QAAQ,KAAK,OAAO,QAAQ,cAAc;AAC/D,QAAI,SAAS,SAAS;AACpB,eAAW,WAAW,UAAU;AAC9B,YAAM,SAAS,cAAc,OAAO,GAC9B,SAASA,OAAK,KAAK,MAAM;AAC/B,YAAI;AACF,yBAAiB,WAAW,UAAU,MAAM,GAAG;AAC7C,gBAAM,MAAM,wBAAwB,SAAS,GAAG;AAChD,yBAAa,IAAI,GAAG;AAAA,UACtB;AAAA,QACF,QAAQ;AAAA,QAER;AAAA,MACF;AAAA;AAEA,eAAW,WAAW;AACpB,qBAAa,IAAI,2BAA2B,cAAc,OAAO,CAAC,CAAC;AAQzE,MAAI;AAEJ,MAAI;AACF,0BAAsB,iBAAiB,gBAAgB;AAAA;AAEvD,QAAI;AACF,UAAM,eAAe,MAAM,wBAAwB,KAAK,WAAW;AAEnE,6BADY,MAAM,4BAA4B,KAAK,YAAY,GACrC;AAAA,IAC5B,QAAQ;AACN,4BAAsB,oBAAI,IAAI;AAAA,IAChC;AAMF,MAAM,uBAAiC,CAAC;AAExC,WAAW,CAAC,UAAU,WAAW,KAAK;AACpC,QAAI,cAAa,IAAI,QAAQ,KAGzB,qBAAoB,IAAI,QAAQ;AAEpC,UAAI;AACF,YAAM,SAAS,MAAM,kBAAkB,KAAK,aAAa,WAAW;AAEpE,QAAI,OAAO,QAAQ,SAAS,MAC1B,QAAQ,KAAK,GAAG,OAAO,OAAO,GAC9B,qBAAqB;AAAA,UACnB,GAAG,OAAO,QAAQ,IAAI,SAAOA,OAAK,KAAK,GAAG,CAAC;AAAA,QAC7C,IAGE,OAAO,QAAQ,SAAS,KAC1B,QAAQ,KAAK,GAAG,OAAO,OAAO;AAAA,MAElC,SAAS,OAAO;AACd,eAAO;AAAA,UACL,+BAA+B,QAAQ,QAAQ,WAAW,KAAK,KAAK;AAAA,QACtE;AAAA,MACF;AAMF,MAAI,qBAAqB,SAAS;AAChC,QAAI;AACF,UAAM,YAAY,6BAA6B,GAAG;AAClD,YAAM,oBAAoB,KAAK,sBAAsB,SAAS;AAAA,IAChE,SAAS,OAAO;AACd,aAAO,MAAM,mCAAmC,KAAK,EAAE;AAAA,IACzD;AAGF,SAAO,EAAE,SAAS,QAAQ;AAC5B;;;AIvHO,SAAS,wBACd,QACA,WACM;AACN,WAAW,YAAY,WAAW;AAChC,QAAM,MAAM,GAAG,SAAS,UAAU,KAAK,SAAS,OAAO;AACvD,IAAK,OAAO,SAAS,GAAG,KACtB,OAA
O,KAAK,GAAG;AAAA,EAEnB;AACF;AAWO,SAAS,qBACd,QACA,QACM;AACN,WAAW,SAAS,QAAQ;AAC1B,QAAM,MAAM,GAAG,MAAM,UAAU,KAAK,MAAM,OAAO;AACjD,IAAK,OAAO,SAAS,GAAG,KACtB,OAAO,KAAK,GAAG;AAAA,EAEnB;AACF;;;A1ChCA,eAAsB,+BACpB,KACA,aACA,SACA,WACA,SACA,aACA,eACA,qBAKA,gBACA,iBACA,qBACA,gBACA,QACA,qBACA,wBACA,YACA,cAC6B;AAC7B,SAAO,MAAM,cAAc,WAAW,IAAI,OAAO,8BAA8B,UAAU,KAAK,IAAI,CAAC,EAAE;AAGrG,MAAM,sBAAsB,eAAe,MAAM,0BAA0B;AAAA,IACzE;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC,GAGK,mBAAuC;AAAA,IAC3C,WAAW;AAAA,IACX,SAAS;AAAA,IACT,SAAS;AAAA,IACT,SAAS;AAAA,IACT,OAAO,CAAC;AAAA,IACR,gBAAgB,CAAC;AAAA,IACjB,cAAc,CAAC;AAAA,IACf,cAAc,CAAC;AAAA,EACjB,GAEM,iBAAiB,oBAAI,IAAY,GACjC,wBAAkC,CAAC,GACrC,uBACE,qBAA+B,CAAC,GAChC,eAAyB,CAAC,GAC1B,YAAsB,CAAC,GACvB,cAAsE,CAAC,GAIvE,iBAAiB,oBAAI,IAAoB,GACzC,qBAAqB,OAAO,WAAoC;AAEpE,QAAI,eAAe,IAAI,MAAM;AAC3B,aAAO,eAAe,IAAI,MAAM;AAGlC,QAAM,YAAYC,OAAK,qBAAqB,MAAM;AAClD,QAAI,CAAC,MAAM,OAAO,SAAS;AACzB,4BAAe,IAAI,QAAQ,MAAM,GAC1B;AAMT,QAAI,eAAe;AACnB,aAAW,CAAC,cAAc,eAAe,KAAK,eAAe,QAAQ,GAAG;AACtE,UAAI,iBAAiB,OAAQ;AAC7B,UAAM,YAAYA,OAAK,qBAAqB,YAAY;AACxD,UAAI,MAAM,OAAO,SAAS;AACxB,YAAI;AACF,cAAM,aAAa,MAAMC,IAAG,KAAK,SAAS,GACpC,aAAa,MAAMA,IAAG,KAAK,SAAS;AAE1C,cAAI,WAAW,QAAQ,WAAW,OAC7B,WAAW,SAAS,WAAW,QAC/B,KAAK,IAAI,WAAW,UAAU,WAAW,OAAO,IAAI;AAGvD,mBAAI,OAAO,WAAW,GAAG,KAAK,CAAC,gBAAgB,WAAW,GAAG,KAC3D,eAAe,QAEf,eAAe,IAAI,cAAc,MAAM,MAC9B,CAAC,OAAO,WAAW,GAAG,KAAK,gBAAgB,WAAW,GAAG,GAClE,eAAe,kBAIjB,eAAe,IAAI,QAAQ,YAAY,GAChC;AAAA,QAEX,QAAQ;AAAA,QAER;AAAA,IAEJ;AAGA,0BAAe,IAAI,QAAQ,MAAM,GAC1B;AAAA,EACT,GAGI,sBAAmD,MACnD,gBAA+E;AACnF,MAAI,CAAC,QAAQ;AACX,QAAI;AACF,4BAAsB,MAAM,mBAAmB,GAAG;AAClD,UAAM,eAAe,oBAAoB,MAAM,WAAW,WAAW;AACrE,MAAI,cAAc,SAAS,OAAO,KAAK,aAAa,KAAK,EAAE,SAAS,MAClE,gBAAgB,aAAa;AAAA,IAEjC,QAAQ;AAAA,IAER;AAOF,MAAI;AACJ,MAAI,UAAU,SAAS,GAAG;AACxB,QAAM,MAAM,0BAA0B,MAAM;AAAA,MAC1C;AAAA,MAAK;AAAA,MAAa,gBAAgB,EAAE,OAAO,cAAc,IAAI;AAAA,MAAM;AAAA,IACrE;AACA,QAAI,yBAAyB,IAAI,0BAA0B,oBAAI,IAAI,GACnE,4BAA4B;AAAA,EAC9B;AAGA,WAAW,YAAY,WAAW;AAEhC,QAAI,mBACA,SAAS;AAEb,QAAK;AAOH,0BAAoB,wBAAwB,aAAa;A
AAA,SAPvC;AAElB,UAAM,SAAS,MAAM,qCAAqC,mBAAmB;AAC7E,eAAS,OAAO,QAChB,oBAAoB,OAAO;AAAA,IAC7B;AAKA,QAAM,iBAAqC;AAAA,MACzC;AAAA,MACA,aAAa;AAAA,MACb,eAAe;AAAA,MACf;AAAA,MACA,gBAAgB;AAAA,MAChB,UAAU;AAAA;AAAA,MACV,QAAQ,QAAQ,UAAU;AAAA,MAC1B,eAAe;AAAA,MACf;AAAA;AAAA,MAEA;AAAA,MACA;AAAA;AAAA,MAEA,wBAAwB,6BAA6B;AAAA,MACrD;AAAA,MACA,qBAAqB,uBAAuB;AAAA,IAC9C;AAEA,QAAI;AACF,UAAM,SAAS,MAAM,wBAAwB,gBAAgB,SAAS,kBAAkB,EAAK,GAGvF,oBAAoB,OAAO,eAAe,CAAC;AACjD,eAAW,aAAa;AAItB,YAHA,eAAe,IAAI,SAAS,GAGxB,2BAA2B,wBAAwB;AACrD,cAAM,MAAMC,WAAS,KAAK,SAAS;AACnC,oCAA0B,uBAAuB,IAAI,2BAA2B,GAAG,CAAC;AAAA,QACtF;AAGF,eAAW,CAAC,QAAQ,OAAO,KAAK,OAAO,QAAQ,OAAO,eAAe,CAAC,CAAC,GAAG;AACxE,YAAM,mBAAmB,MAAM,mBAAmB,MAAM;AAExD,oBAAY,gBAAgB,IAAI;AAAA,UAC9B,YAAY,gBAAgB,KAAK,CAAC;AAAA,UAAG;AAAA,QACvC;AAAA,MACF;AAGA,uBAAiB,aAAa,OAAO,gBACrC,iBAAiB,WAAW,GAC5B,iBAAiB,WAAW,OAAO,iBAAiB,OAAO,cAG3D,wBAAwB,cAAc,OAAO,SAAS,GACtD,qBAAqB,WAAW,OAAO,MAAM,GAGzC,OAAO,eACT,sBAAsB,KAAK,GAAG,iBAAiB,GAC3C,OAAO,kBACT,wBAAwB,OAAO,iBAG/B,OAAO,gBAAgB,OAAO,aAAa,SAAS,KACtD,mBAAmB,KAAK,GAAG,OAAO,aAAa,IAAI,SAAOF,OAAK,KAAK,GAAG,CAAC,CAAC,GAEvE,OAAO,kBAAkB,OAAO,eAAe,SAAS,MACrD,iBAAiB,mBACpB,iBAAiB,iBAAiB,CAAC,IAErC,iBAAiB,eAAe,KAAK,GAAG,OAAO,cAAc,IAI3D,OAAO,iBAAiB,KAC1B,OAAO;AAAA,QACL,GAAG,QAAQ,eAAe,OAAO,cAAc,YAC9C,QAAQ,SAAS,eAAe,WAAW,OAAO,YAAY;AAAA,MACjE;AAAA,IAGJ,SAAS,OAAO;AACd,aAAO,MAAM,qBAAqB,WAAW,iBAAiB,QAAQ,KAAK,KAAK,EAAE,GAClF,UAAU,KAAK,GAAG,QAAQ,KAAK,KAAK,EAAE;AAAA,IACxC;AAAA,EACF;AAOA,MAJA,oBAAoB,YAAY,GAChC,iBAAiB,SAAS,GAGtB,iBAAiB,CAAC,QAAQ,QAAQ;AACpC,QAAM,cAAc,MAAM,iBAAiB;AAAA,MACzC;AAAA,MACA;AAAA,MACA;AAAA,MACA,gBAAgB;AAAA,MAChB;AAAA,MACA,QAAQ;AAAA,MACR;AAAA,MACA,kBAAkB,6BAA6B;AAAA,IACjD,CAAC;AAED,qBAAiB,WAAW,YAAY,QAAQ,QAChD,iBAAiB,aAAa,KAAK,GAAG,YAAY,OAAO,IAErD,YAAY,QAAQ,SAAS,KAAK,YAAY,QAAQ,SAAS,MACjE,OAAO;AAAA,MACL,qBAAqB,WAAW,aAAa,YAAY,QAAQ,MAAM,mBAC5D,YAAY,QAAQ,MAAM;AAAA,IACvC;AAAA,EAEJ;AAGA,MAAI,CAAC,QAAQ,QAAQ;AAKnB,QAAM,SAAS,GAAQ,mBAAmB,eAAe,SAAS,GAAG,KAAK,eAAe,SAAS,GAAG,KAAK,eAAe,SAAS,GAAG,KAE/H,gBADY,GAAQ,kBAAkB,eAAe,QAAQ,OAAO
,GAAG,EAAE,SAAS,KAAK,KAEzF,eAAgB,QAAQ,OAAO,GAAG,EAAE,QAAQ,WAAW,EAAE,IACzD,QAGE,mBAAmB,uBAAuB,qBAE1C,kBAAmB,kBAAkB,CAAC,SACxCA,OAAK,kBAAkB,cAAc,IACpC,iBAAiB,cAAc,SAAS,IACvCA,OAAK,kBAAkB,aAAa,IACpC;AAEN,UAAM;AAAA,MACJ;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAGA,0BAAiB,QAAQ,MAAM,KAAK,cAAc,GAClD,iBAAiB,iBAAiB,MAAM,KAAK,cAAc,GACvD,sBAAsB,SAAS,MACjC,iBAAiB,aAAa,IAC9B,iBAAiB,gBAAgB,uBACjC,iBAAiB,kBAAkB,wBAEjC,mBAAmB,SAAS,MAC9B,iBAAiB,eAAe,qBAG3B;AACT;AASA,eAAe,6BACb,KACA,aACA,SACA,aACA,aACA,qBAKA,iBACA,qBACA,WACA,eACA,YACA,cACe;AACf,MAAM,mBAAmB,mBAAmB;AAG5C,MAAI,qBAAqB;AACvB,wBAAoB,oBAAoB;AAAA,MACtC;AAAA,MACA,MAAM;AAAA,MACN,SAAS;AAAA,MACT,OAAO;AAAA,MACP,aAAa;AAAA,MACb;AAAA,MACA,WAAW;AAAA,MACX;AAAA,MACA;AAAA,IACF,CAAC;AACD;AAAA,EACF;AAEA,MAAI;AACF,QAAM,WAAW,MAAM,mBAAmB,GAAG;AAG7C,aAAS,MAAM,WAAW,SAAS,MAAM,YAAY,CAAC;AAGtD,QAAM,QAAgE,CAAC;AACvE,aAAW,CAAC,QAAQ,OAAO,KAAK,OAAO,QAAQ,WAAW;AACxD,YAAM,MAAM,IAAI;AAIlB,QAAM,gBAAgB,kBAAkB,aAAa,GAAG,GAIlDG,oBAAmB,mBAAmB,SAGtC,eAAoB;AAAA,MACxB,GAAG,SAAS,MAAM,SAAS,WAAW;AAAA,MACtC,MAAM;AAAA,MACN,SAASA;AAAA,MACT,OAAO,YAAY,KAAK;AAAA,IAC1B;AAGA,IAAI,kBACF,aAAa,YAAY,gBAIvB,aAAa,UAAU,SAAS,MAClC,aAAa,YAAY,YAIvB,wBACF,aAAa,cAAc,sBAIzB,eACF,aAAa,aAAa,aAIxB,iBACF,aAAa,eAAe,eAG9B,SAAS,MAAM,SAAS,WAAW,IAAI,cAEvC,MAAM,oBAAoB,QAAQ,GAClC,OAAO,MAAM,+BAA+B,WAAW,IAAI,OAAO,EAAE;AAGpE,QAAI;AACF,UAAM,aAAa,MAAM,aAAa,GAAG,GACnC,YAA6B;AAAA,QACjC,SAASA;AAAA,QACT,cAAc,aAAa;AAAA,QAC3B,aAAa;AAAA,MACf;AAEA,MAAI,eAAe,CAAC,eAAe,WAAW,MAC5C,UAAU,OAAO,cAEnB,WAAW,SAAS,SAAS,WAAW,IAAI,WAC5C,MAAM,cAAc,UAAU;AAAA,IAChC,SAAS,WAAW;AAClB,aAAO,MAAM,iCAAiC,WAAW,KAAK,SAAS,EAAE;AAAA,IAC3E;AAAA,EACF,SAAS,OAAO;AACd,WAAO,KAAK,wCAAwC,WAAW,KAAK,KAAK,EAAE;AAAA,EAC7E;AACF;;;A2CxdA,eAAsB,oCAAoC,QAAqE;AAC7H,MAAM,EAAE,KAAK,UAAU,WAAW,gBAAgB,SAAS,WAAW,gBAAgB,QAAQ,qBAAqB,uBAAuB,IAAI,QAE1I,iBAAiB,GACjB,eAAe,GACf,eAAe,GACf,eAAe,GACf,cAAc,GACZ,gBAA0B,CAAC,GAC3B,kBAA4B,CAAC,GAC7B,kBAA4B,CAAC,GAC7B,SAAmB,CAAC,GACpB
,qBAA+B,CAAC,GAChC,kBAA4B,CAAC,GAC7B,oBAAqC,CAAC;AAE5C,WAAW,YAAY;AACrB,QAAI;AAEF,UAAM,sBAAuB,SAAiB,qBAGxC,iBAAiB,gBAAgB,wBAAwB,IAAI,SAAS,IAAI,KAAK,IAE/E,gBAAoC,MAAM;AAAA,QAC9C;AAAA,QACA,SAAS;AAAA,QACT,SAAS;AAAA,QACT;AAAA,QACA;AAAA,QACA,SAAS;AAAA,QACT,SAAS,IAAI;AAAA,QACb,SAAS;AAAA,QACT;AAAA,QACA,SAAS;AAAA,QACT;AAAA;AAAA,QACA;AAAA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA;AAAA,QACA,OAAO;AAAA,MACT;AAEA,wBAAkB,cAAc,WAChC,gBAAgB,cAAc,SAC9B,gBAAgB,cAAc,SAC9B,gBAAgB,cAAc,SAE9B,cAAc,KAAK,GAAG,cAAc,cAAc,GAClD,gBAAgB,KAAK,GAAG,cAAc,YAAY,GAClD,gBAAgB,KAAK,GAAG,cAAc,YAAY,GAG9C,cAAc,mBAChB,mBAAmB,KAAK,GAAG,cAAc,eAAe,GAEtD,cAAc,gBAChB,gBAAgB,KAAK,GAAG,cAAc,YAAY,GAEhD,cAAc,kBAChB,kBAAkB,KAAK,GAAG,cAAc,cAAc,IAGpD,cAAc,YAAY,KAAK,cAAc,UAAU,KAAK,cAAc,UAAU,MACtF,OAAO,KAAK,2BAA2B,SAAS,IAAI,KAAK,cAAc,SAAS,eAAe,cAAc,OAAO,aAAa,cAAc,OAAO,UAAU;AAAA,IAEpK,SAAS,OAAO;AACd,UAAI,iBAAiB;AACnB,cAAM;AAER,UAAM,WAAW,kCAAkC,SAAS,IAAI,KAAK,KAAK;AAC1E,aAAO,MAAM,QAAQ,GACrB,OAAO,KAAK,QAAQ,GACpB;AAAA,IACF;AAIF,MAAM,kBAAkB;AAAA,IACtB,WAAW,oBAAI,IAAY;AAAA,IAC3B,SAAS,oBAAI,IAAY;AAAA,IACzB,SAAS,oBAAI,IAAY;AAAA,EAC3B,GAGM,wBAAwB,oBAAI,IAAyC;AAE3E,WAAW,YAAY;AACrB,QAAI;AACF,UAAM,cAAc,MAAM;AAAA,QACxB,SAAS;AAAA,QACT,SAAS;AAAA,QACT;AAAA,QACA,SAAS;AAAA,QACT;AAAA,MACF,GACM,gBAAgB,MAAM;AAAA,QAC1B;AAAA,QACA,SAAS;AAAA,QACT,YAAY;AAAA,QACZ;AAAA,MACF;AAOA,UALA,cAAc,QAAQ,QAAQ,UAAQ,gBAAgB,UAAU,IAAI,IAAI,CAAC,GACzE,cAAc,QAAQ,QAAQ,UAAQ,gBAAgB,QAAQ,IAAI,IAAI,CAAC,GACvE,cAAc,QAAQ,QAAQ,UAAQ,gBAAgB,QAAQ,IAAI,IAAI,CAAC,GAGnE,CAAC,QAAQ,QAAQ;AACnB,YAAM,gBAAgB,CAAC,GAAG,cAAc,SAAS,GAAG,cAAc,OAAO;AACzE,QAAI,cAAc,SAAS,KACzB,sBAAsB,IAAI,SAAS,MAAM,EAAE,cAAc,CAAC;AAAA,MAE9D;AAAA,IACF,SAAS,OAAO;AACd,UAAI,iBAAiB;AACnB,cAAM;AAER,UAAM,WAAW,gCAAgC,SAAS,IAAI,KAAK,KAAK;AACxE,aAAO,MAAM,QAAQ,GACrB,OAAO,KAAK,QAAQ,GACpB;AAAA,IACF;AAIF,MAAI,CAAC,QAAQ,UAAU,sBAAsB,OAAO;AAClD,QAAI;AAEF,eAAW,CAAC,aAAa,EAAE,cAAc,CAAC,KAAK,uBAAuB;AACpE,YAAM,QAAgE,CAAC;AACvE,iBAAW,KAAK;AACd,gBAAM,CAAC,IAAI,CAAC,EAAE,QAAQ,GAAG,OAAO,aAAa,MAAM,CAAC,WAAW,EAAE,CAAC;AAEpE,4BAAoB,u
BAAuB,EAAE,aAAa,MAAM,CAAC;AAAA,MACnE;AAAA;AAEA,UAAI;AACF,YAAM,WAAW,MAAM,mBAAmB,GAAG;AAC7C,iBAAS,MAAM,WAAW,SAAS,MAAM,YAAY,CAAC;AACtD,iBAAW,CAAC,aAAa,EAAE,cAAc,CAAC,KAAK,uBAAuB;AACpE,cAAM,QAAQ,SAAS,MAAM,SAAS,WAAW;AACjD,cAAI,CAAC,MAAO;AACZ,cAAM,QAAQ,EAAE,GAAI,MAAM,SAAS,CAAC,EAAG;AACvC,mBAAW,KAAK,eAAe;AAC7B,gBAAM,WAAsC,EAAE,QAAQ,GAAG,OAAO,aAAa,MAAM,CAAC,WAAW,EAAE,GAC3F,WAAW,MAAM,CAAC,KAAK,CAAC;AAC9B,kBAAM,CAAC,IAAI,mBAAmB,UAAU,CAAC,QAAQ,CAAC;AAAA,UACpD;AACA,mBAAS,MAAM,SAAS,WAAW,IAAI,EAAE,GAAG,OAAO,MAAM;AAAA,QAC3D;AACA,cAAM,oBAAoB,QAAQ,GAClC,OAAO,MAAM,iDAAiD,sBAAsB,IAAI,aAAa;AAAA,MACvG,SAAS,OAAO;AACd,eAAO,KAAK,sDAAsD,KAAK,EAAE;AAAA,MAC3E;AAKJ,MAAM,WAAW,IAAI,IAAI,aAAa,GAChC,aAAa,IAAI,IAAI,eAAe,GACpC,uBAAuB,MAAM,KAAK,gBAAgB,SAAS,EAAE;AAAA,IACjE,OAAK,CAAC,SAAS,IAAI,CAAC;AAAA,EACtB,GACM,qBAAqB,MAAM,KAAK,gBAAgB,OAAO,EAAE;AAAA,IAC7D,OAAK,CAAC,WAAW,IAAI,CAAC;AAAA,EACxB;AAEA,SAAO;AAAA,IACL,gBAAgB;AAAA,IAChB,cAAc;AAAA,IACd,YAAY;AAAA,IACZ;AAAA,IACA,QAAQ,OAAO,SAAS,IAAI,SAAS;AAAA,IACrC;AAAA,IACA,iBAAiB;AAAA,MACf,WAAW;AAAA,MACX,SAAS;AAAA,MACT,SAAS,MAAM,KAAK,gBAAgB,OAAO;AAAA,IAC7C;AAAA,IACA,uBAAuB,iBAAiB;AAAA,IACxC,YAAY,mBAAmB,SAAS,KAAK;AAAA,IAC7C,iBAAiB,mBAAmB,SAAS,IAAI,qBAAqB;AAAA,IACtE,gBAAgB,kBAAkB,SAAS,IAAI,oBAAoB;AAAA,IACnE,cAAc,gBAAgB,SAAS,IAAI,kBAAkB;AAAA,EAC/D;AACF;;;AC3PA,SAAS,QAAAC,cAAY;;;ACYrB,eAAsB,gBAAgB,WAAwC;AAC5E,MAAM,oBAAoB,MAAM,qBAAqB,SAAS;AAE9D,SAAI,kBAAkB,SAAS,KAC7B,OAAO,MAAM,4BAA4B,kBAAkB,KAAK,IAAI,CAAC,EAAE,GAGlE;AACT;AAKA,eAAsB,2BACpB,QACA,QACqB;AACrB,MAAM,MAAM,UAAU,cAAc,GAC9B,MAAM,UAAU,cAAc;AAEpC,MAAI,KAAK,oBAAoB,GAC7B,IAAI,KAAK,sDAAsD;AAE/D,MAAM,UAAU,OAAO,OAAO,uBAAuB,CAAC,EAAE,IAAI,CAAC,cAAkC;AAAA,IAC7F,OAAO,SAAS;AAAA,IAChB,OAAO,SAAS;AAAA,EAClB,EAAE,GAEI,WAAW,MAAM,IAAI;AAAA,IACzB;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAEA,SAAO,WAAW,CAAC,QAAoB,IAAI,CAAC;AAC9C;;;AD7BA,eAAsB,iBACpB,KACA,WACA,UAA+E,CAAC,GAC3D;AACrB,MAAM,YAAY,QAAQ,gBAAgB,IAGpC,aAAa,mBAAmB,SAAS;AAC/C,MAAI,cAAc,WAAW,SAAS,GAAG;AACvC,QAAM,WAAW,WAAW,IAAI,UAAQ,oBAAoB,IAAI,CAAC,GAC3D,eAAe,SAAS,UAAU,cAAY,CAAC,QAA
Q;AAC7D,QAAI,iBAAiB;AACnB,YAAM,IAAI,MAAM,YAAY,WAAW,YAAY,CAAC,YAAY;AAElE,WAAO;AAAA,EACT;AAGA,MAAM,oBAAoB,MAAM,sBAAsB,GAAG;AACzD,MAAI,kBAAmB,QAAO;AAG9B,MAAM,OAAO,MAAM,gBAAgB,GAAG;AACtC,SAAI,KAAK,SAAS,IAAU,OAGxB,YACe,MAAM,2BAA2B,QAAQ,QAAQ,QAAQ,MAAM,IAK3E,CAAC,QAAQ;AAClB;AAMA,eAAe,sBAAsB,KAAyC;AAC5E,MAAI;AACJ,MAAI;AACF,QAAM,UAAU,uBAAuB,GAAG,GACpC,eAAeC,OAAK,SAAS,cAAc,eAAe;AAChE,eAAW,MAAM,gBAAgB,YAAY;AAAA,EAC/C,QAAQ;AAEN,kBAAO,MAAM,mEAAmE,GACzE;AAAA,EACT;AAEA,MAAI,CAAC,SAAS,aAAa,SAAS,UAAU,WAAW,EAAG,QAAO;AAGnE,MAAM,WAAW,SAAS,UAAU,IAAI,UAAQ,oBAAoB,IAAI,CAAC,GACnE,eAAe,SAAS,UAAU,OAAK,CAAC,CAAC;AAC/C,MAAI,iBAAiB;AACnB,UAAM,IAAI,MAAM,YAAY,SAAS,UAAU,YAAY,CAAC,wBAAwB;AAEtF,gBAAO,MAAM,6BAA6B,SAAS,SAAS,GACrD;AACT;;;AE/CA,eAAsB,yBACpB,KAC0B;AAE1B,MAAI,IAAI,UAAU,WAAW,GAAG;AAC9B,QAAM,YAAY,IAAI,UAAU,mBAAmB,0BAA6B,KAAK;AACrF,QAAI,YAAY,MAAM;AAAA,MACpB,IAAI;AAAA,MACJ,IAAI,QAAQ;AAAA,MACZ,EAAE,aAAa,WAAW,QAAQ,cAAc,IAAI,SAAS,GAAG,QAAQ,cAAc,IAAI,SAAS,EAAE;AAAA,IACvG;AAAA,EACF;AAGA,MAAM,iBAAiB,IAAI,gBAGrB,UAAU,MAAM,oCAAoC;AAAA,IACxD,KAAK,IAAI;AAAA,IACT,UAAU,IAAI;AAAA,IACd,WAAW,IAAI;AAAA,IACf;AAAA,IACA,SAAS,IAAI;AAAA,IACb,WAAW,IAAI;AAAA,IACf,gBAAgB,IAAI;AAAA;AAAA,IACpB,QAAQ,IAAI,WAAW;AAAA,IACvB,qBAAqB,IAAI,WAAW;AAAA,IACpC,wBAAwB,IAAI,WAAW;AAAA,IACvC,cAAc,IAAI;AAAA,EACpB,CAAC;AAGD,UAAQ,QAAQ,QAAQ,OAAK,IAAI,OAAO,KAAK,CAAC,CAAC;AAE/C,MAAM,YAAY,QAAQ,aAAa,GACjC,oBACJ,QAAQ,cAAc,SAAS,KAC/B,QAAQ,gBAAgB,SAAS,KACjC,QAAQ,gBAAgB,UAAU,SAAS,KAC3C,QAAQ,gBAAgB,QAAQ,SAAS;AAG3C,MAAI,qBAAqB,IAAI,UAAU;AACrC,aAAW,OAAO,IAAI,kBAAkB;AAGtC,UAAM,EAAE,UAAU,cAAc,iBAAiB,IAAI,6BAA6B,IAAI,IAAI,GAIpF,eAAe,IAAI,kBAAkB,kBAGvC;AACJ,MAAI,IAAI,sBACN,eAAe,WACN,iBAEL,aAAa,SAAS,OAAO,IAC/B,eAAe,UACN,aAAa,SAAS,OAAO,MACtC,eAAe;AAKnB,UAAM,eAAe,eACjB,aAAa,MAAM,GAAG,EAAE,IAAI,GAAG,QAAQ,gBAAgB,EAAE,KAAK,IAAI,KAAK,MAAM,GAAG,EAAE,IAAI,KAAK,IAAI,OAC/F,IAAI,KAAK,MAAM,GAAG,EAAE,IAAI,KAAK,IAAI;AAErC,UAAI,UAAU,mBAAmB,cAAc;AAAA,QAC7C,aAAa;AAAA;AAAA,QACb,SAAS,IAAI;AAAA,QACb;AAAA;AAAA,QACA;AAAA,QACA;AAAA,QACA,iBAAiB,IAAI,qBAAqB;AAAA,QAC1C,YAAY,IAAI,q
BAAqB;AAAA,MACvC,CAAC;AAAA,IACH;AAGF,SAAO;AAAA,IACL,GAAG;AAAA,IACH;AAAA,IACA;AAAA,IACA,QAAQ,QAAQ;AAAA,EAClB;AACF;;;ACjHA,eAAsB,oBAAoB,KAAyC;AACjF,MAAM,cAAc,IAAI,iBAAiB,KAAK,SAAO,IAAI,MAAM;AAE/D,MAAI,CAAC,aAAa;AAChB,WAAO,KAAK,sEAAsE;AAClF;AAAA,EACF;AAEA,MAAI;AAEF,QAAM,SAAS,oBAAoB,KAAK,WAAW;AAEnD,UAAM;AAAA,MACJ,IAAI;AAAA,MACJ,IAAI,OAAO;AAAA,MACX,YAAY;AAAA,MACZ,IAAI,QAAQ,OAAO;AAAA,MACnB,OAAO;AAAA,MACP,OAAO;AAAA,MACP,OAAO;AAAA,MACP,OAAO;AAAA,MACP,OAAO;AAAA,MACP,OAAO;AAAA,IACT,GAEA,OAAO,KAAK,wBAAwB,IAAI,OAAO,WAAW,EAAE;AAAA,EAE9D,SAAS,OAAO;AACd,WAAO,KAAK,8BAA8B,KAAK,EAAE;AAAA,EAEnD;AACF;AAEO,SAAS,oBAAoB,KAA0B,aAAkB;AAC9E,MAAM,SAAc;AAAA,IAClB,OAAO;AAAA,IACP,OAAO;AAAA,IACP,MAAM;AAAA;AAAA,IACN,cAAc;AAAA;AAAA,IACd,QAAQ;AAAA,IACR,QAAQ;AAAA,EACV;AAIA,MAAI,IAAI,OAAO;AACb,kBAAO,SAAS,IAAI,OAAO,kBAAkB,QAC7C,OAAO,SAAS,IAAI,OAAO,kBAAkB,QAE7C,OAAO,OAAO,IAAI,OAAO,kBAAkB,SAC3C,OAAO,eAAe,IAAI,OAAO,cAC1B;AAKT,MAAI,IAAI,OAAO;AAEb,WAAO;AAQT,UAJI,IAAI,iBACN,OAAO,OAAO,IAAI,eAGZ,IAAI,OAAO,MAAM;AAAA,IACvB,KAAK;AAEH,aAAO,QAAQ,IAAI,OAAO;AAC1B;AAAA,IAEF,KAAK;AAEH,aAAO,OAAO,kBAAkB,IAAI,OAAO,aAAa,IAAI,IAAI,SAAS;AACzE;AAAA,IAEF,KAAK;AAEH,aAAO,SAAS,IAAI,OAAO,QAC3B,OAAO,SAAS,IAAI,OAAO,QAC3B,OAAO,OAAO,IAAI,OAAO,WAAW,QACpC,OAAO,eAAe,IAAI,OAAO;AACjC;AAAA,IAEF,KAAK;AAEH;AAAA,EACJ;AAEA,SAAO;AACT;;;ACnDA,SAAS,eAAe,OAAiB,QAAoB,SAAiB,MAAY;AACxF,WAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACrC,QAAM,YAAY,iBAAiB,MAAM,MAAM,SAAS,CAAC;AACzD,WAAO,KAAK,GAAG,MAAM,GAAG,SAAS,GAAG,MAAM,CAAC,CAAC,EAAE;AAAA,EAChD;AACF;AAMA,SAAS,eACP,OACA,OACA,QACA,aACM;AACN,MAAI,aAAa;AAEf,QAAM,eAAe,MAAM,MAAM,GAAG,EAAU,GACxC,OAAO,MAAM,SAAS,KAAa;AAAA,UAAa,MAAM,SAAS,EAAU,UAAU;AACzF,WAAO,KAAK,aAAa,KAAK;AAAA,CAAI,IAAI,MAAM,KAAK;AAAA,EACnD;AACE,WAAO,QAAQ,KAAK,GACpB,eAAe,OAAO,MAAM;AAEhC;AAMO,SAAS,2BAA2B,MAAyB,SAAqB,cAAc,GAAS;AAC9G,MAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,sBAAsB;AAAA,IACtB;AAAA,IACA;AAAA,IACA;AAAA,IACA,cAAc;AAAA,IACd;AAAA,EACF,IAAI,MAEE,gBAAgB,IAAI,IAAI,mBA
AmB,CAAC,CAAC,GAC7C,aAAa,IAAI,IAAI,gBAAgB,CAAC,CAAC,GACvC,MAAM,CAAC,SAAiB,UAAU,IAAI,WAGtC,YAAa,cAAc,aAAa,KAAM,IAC9C,oBAAqB,kBAAkB,eAAe,SAAS,KAC1C,gBAAgB,aAAa,SAAS,KACtC,oBAAoB,gBAAgB,UAAU,SAAS,KAAK,gBAAgB,QAAQ,SAAS;AAExH,MAAI,aAAa,CAAC,mBAAmB;AAGnC,QADA,OAAO,MAAM,qBAAqB,WAAW,GAAG,cAAc,IAAI,YAAY,OAAO,KAAK,EAAE,EAAE,GAC1F,UAAU,OAAO,SAAS,GAAG;AAC/B,aAAO,MAAM,sBAAsB;AACnC,eAAW,SAAS;AAClB,eAAO,KAAK,aAAQ,KAAK,EAAE;AAAA,IAE/B;AACA;AAAA,EACF;AAGA,MAAI,CAAC,qBAAqB,CAAC,WAAW;AACpC,QAAI,cAAc,aAAa,WAAW;AAC1C,IAAI,gBACF,eAAe,IAAI,YAAY,OAAO,KAExC,eAAe,oBACf,OAAO,QAAQ,GAAG,WAAW,EAAE,GAC/B,OAAO,KAAK,yFAAyF,GACjG,uBACF,OAAO,KAAK,sDAAsD;AAEpE;AAAA,EACF;AAGA,MAAI,CAAC,KAAK,gBAAgB;AACxB,QAAI,aAAa,aAAa,WAAW;AACzC,IAAI,aAAa,YACf,cAAc,IAAI,YAAY,OAAO,KAEvC,OAAO,QAAQ,UAAU;AAAA,EAC3B;AAGA,MAAM,qBAAqB,iBAAiB,OAAO,OAAK,CAAC,EAAE,MAAM;AACjE,MAAI,mBAAmB,SAAS,GAAG;AACjC,WAAO,QAAQ,2BAA2B,mBAAmB,MAAM,EAAE;AACrE,QAAM,WAAW,mBAAmB,IAAI,SAK/B,GAHL,OAAO,IAAI,QAAS,aAAa,IAAI,KAAK,WAAW,GAAG,KAAK,IAAI,KAAK,WAAW,KAAK,KAClF,IAAI,OACJ,IAAI,IAAI,IAAI,EACQ,IAAI,IAAI,OAAO,EAC1C;AACD,mBAAe,UAAU,MAAM;AAAA,EACjC;AAMA,MALI,iBAAiB,SAAS,KAC5B,OAAO,QAAQ,6BAA6B,iBAAiB,MAAM,EAAE,GAInE,kBAAkB,eAAe,SAAS,GAAG;AAC/C,QAAM,SAAS,oBAAoB,eAAe,MAAM,IAClD,cAAc,CAAC,GAAG,cAAc,EAAE,KAAK,CAAC,GAAG,MAAM,EAAE,cAAc,CAAC,CAAC;AACzE,mBAAe,YAAY,IAAI,OAAK;AAClC,UAAM,UAAU,qBAAqB,CAAC;AACtC,aAAO,cAAc,IAAI,CAAC,IAAI,GAAG,OAAO,IAAI,IAAI,cAAc,CAAC,KAC3D,WAAW,IAAI,CAAC,IAAI,GAAG,OAAO,IAAI,IAAI,WAAW,CAAC,KAClD;AAAA,IACN,CAAC,GAAG,QAAQ,QAAQ,WAAW;AAAA,EACjC;AAGA,MAAI,gBAAgB,aAAa,SAAS,GAAG;AAC3C,QAAM,SAAS,kBAAkB,aAAa,MAAM,IAC9C,cAAc,CAAC,GAAG,YAAY,EAAE,KAAK,CAAC,GAAG,MAAM,EAAE,cAAc,CAAC,CAAC;AACvE,mBAAe,YAAY,IAAI,OAAK;AAClC,UAAM,UAAU,qBAAqB,CAAC;AACtC,aAAO,cAAc,IAAI,CAAC,IAAI,GAAG,OAAO,IAAI,IAAI,cAAc,CAAC,KAC3D,WAAW,IAAI,CAAC,IAAI,GAAG,OAAO,IAAI,IAAI,WAAW,CAAC,KAClD;AAAA,IACN,CAAC,GAAG,QAAQ,QAAQ,WAAW;AAAA,EACjC;AAGA,MAAI,kBAAkB,eAAe,SAAS,GAAG;AAC/C,QAAM,QAAQ,eAAe;AAAA,MAC3B,OAAK,GAAG,qBAAqB,EAAE,IAAI,CAAC,WAAM,qBAAqB,EAAE,EAAE,CAAC;AAAA,IACtE;AACA,mBAA
e,OAAO,oBAAoB,eAAe,MAAM,IAAI,QAAQ,WAAW;AAAA,EACxF;AAGA,MAAI,qBAAqB,kBAAkB,SAAS,GAAG;AACrD,QAAM,QAAQ,kBAAkB,QAC1B,SAAS,YAAY,KAAK,iCAAiC,UAAU,IAAI,KAAK,GAAG;AACvF,IAAI,cACF,OAAO,KAAK,kBAAkB,KAAK;AAAA,CAAI,GAAG,MAAM,KAEhD,OAAO,QAAQ,MAAM,GACrB,eAAe,mBAAmB,MAAM;AAAA,EAE5C;AAGA,MAAI,iBAAiB;AACnB,QAAM,iBAAiB,gBAAgB,UAAU,SAAS,gBAAgB,QAAQ;AAClF,QAAI,iBAAiB,GAAG;AACtB,UAAM,YAAsB,CAAC;AAC7B,UAAI,gBAAgB,UAAU,SAAS,GAAG;AACxC,YAAM,kBAAkB,CAAC,GAAG,gBAAgB,SAAS,EAAE,KAAK,CAAC,GAAG,MAAM,EAAE,cAAc,CAAC,CAAC;AACxF,iBAAW,QAAQ;AACjB,oBAAU,KAAK,GAAG,qBAAqB,IAAI,CAAC,YAAY;AAAA,MAE5D;AACA,UAAI,gBAAgB,QAAQ,SAAS,GAAG;AACtC,YAAM,gBAAgB,CAAC,GAAG,gBAAgB,OAAO,EAAE,KAAK,CAAC,GAAG,MAAM,EAAE,cAAc,CAAC,CAAC;AACpF,iBAAW,QAAQ;AACjB,oBAAU,KAAK,GAAG,qBAAqB,IAAI,CAAC,YAAY;AAAA,MAE5D;AACA,qBAAe,WAAW,eAAe,cAAc,YAAY,QAAQ,WAAW;AAAA,IACxF;AAAA,EACF;AAcA,MAXI,eAAe,QAAQ,SAAS,KAClC,OAAO,QAAQ,iCAAiC,eAAe,QAAQ,KAAK,IAAI,CAAC,EAAE,GAIjF,aAAa,UAAU,OAAO,SAAS,MACzC,OAAO,KAAK,+BAA+B,OAAO,MAAM,EAAE,GAC1D,eAAe,QAAQ,MAAM,IAI3B,mBAAmB,gBAAgB,SAAS,GAAG;AACjD,WAAO,KAAK,gCAAgC;AAC5C,aAAW,WAAW,iBAAiB;AACrC,UAAM,cAAc,8BAA8B,yBAAyB,OAAO,CAAC;AACnF,aAAO,KAAK,aAAQ,OAAO,KAAK,WAAW,GAAG;AAAA,IAChD;AACA,WAAO,KAAK,4CAAqC,GACjD,OAAO,KAAK,mDAA8C,GAC1D,OAAO,KAAK,qDAAgD,gBAAgB,KAAK,GAAG,CAAC,EAAE,GACvF,OAAO,KAAK,uCAAkC,GAC9C,OAAO,KAAK,EAAE;AAAA,EAChB;AACF;AAEA,SAAS,8BAA8B,SAAkD;AACvF,MAAI,CAAC;AACH,WAAO;AAGT,UAAQ,QAAQ,QAAQ;AAAA,IACtB,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT;AACE,aAAO,yBAAyB,QAAQ,WAAW,eAAe;AAAA,EACtE;AACF;;;ACrQA,eAAsB,mBACpB,KACA,eAC8D;AAC9D,MAAM,cAAc,IAAI,iBAAiB,KAAK,SAAO,IAAI,MAAM,GAGzD,sBAAsB,IAAI,OAAO,SAAS,aAC1C,aAAgC;AAAA,IACpC,aAAa,IAAI,OAAO;AAAA,IACxB,kBAAkB,IAAI;AAAA,IACtB,gBAAgB,EAAE,WAAW,IAAI,WAAW,SAAS,CAAC,EAAE;AAAA,IACxD,SAAS,IAAI;AAAA,IACb;AAAA,IACA,gBAAgB,cAAc;AAAA,IAC9B,cAAc,cAAc;AAAA,IAC5B,iBAAiB,cAAc;AAAA,IAC/B,iBAAiB,CAAC;AAAA,IAClB,wBAAwB,CAAC;AAAA,IACzB,YAAY,cAAc;AAAA,IAC1B,QAAQ,cAAc;AAAA,IACtB;AAAA,IACA,YAAY,cAAc;AAAA,IAC1B,iBAAiB,cAAc;AAAA,IA
C/B,gBAAgB,cAAc;AAAA,IAC9B,cAAc,cAAc;AAAA,IAC5B,aAAa,IAAI,UAAU,eAAe;AAAA,IAC1C,mBAAmB,IAAI;AAAA,EACzB;AAEA,SAAI,IAAI,kBAEC;AAAA,IACL,SAAS;AAAA,IACT,MAAM;AAAA,MACJ,aAAa,IAAI,OAAO;AAAA,MACxB,WAAW,kBAAkB,IAAI,SAAS;AAAA,MAC1C,kBAAkB,IAAI;AAAA,MACtB,eAAe,IAAI,iBAAiB;AAAA,MACpC,WAAW,cAAc;AAAA,MACzB,SAAS,cAAc;AAAA,MACvB,uBAAuB,cAAc,iBAAiB,cAAc,gBAAgB;AAAA,IACtF;AAAA,IACA,UAAU,IAAI,SAAS,SAAS,IAAI,MAAM,KAAK,IAAI,IAAI,IAAI,QAAQ,CAAC,IAAI;AAAA,IACxE,aAAa;AAAA,EACf,KAGF,2BAA2B,YAAY,cAAc,IAAI,SAAS,CAAC,GAE5D;AAAA,IACL,SAAS;AAAA,IACT,MAAM;AAAA,MACJ,aAAa,IAAI,OAAO;AAAA,MACxB,WAAW,kBAAkB,IAAI,SAAS;AAAA,MAC1C,kBAAkB,IAAI;AAAA,MACtB,eAAe,IAAI,iBAAiB;AAAA,MACpC,WAAW,cAAc;AAAA,MACzB,SAAS,cAAc;AAAA,MACvB,uBAAuB,cAAc,iBAAiB,cAAc,gBAAgB;AAAA,IACtF;AAAA,IACA,UAAU,IAAI,SAAS,SAAS,IAAI,MAAM,KAAK,IAAI,IAAI,IAAI,QAAQ,CAAC,IAAI;AAAA,EAC1E;AACF;;;AC1EA,OAAOC,WAAU;;;ACQjB,OAAOC,WAAU;AA2BjB,eAAsB,gBACpB,WACA,OACA,aACqB;AACrB,MAAI,kBAAkB,GAEhB,WAAW,cACZ,MAAM,SAAS,WAAW,IAAI,EAAE,CAAC,WAAW,GAAG,MAAM,SAAS,WAAW,EAAE,IAAI,CAAC,IACjF,MAAM;AAEV,WAAW,OAAO,OAAO,OAAO,QAAQ;AACtC,QAAK,IAAI;AAET,eAAW,CAAC,WAAW,OAAO,KAAK,OAAO,QAAQ,IAAI,KAAK,GAAG;AAC5D,YAAI,CAAC,MAAM,QAAQ,OAAO,KAAK,QAAQ,WAAW,EAAG;AAErD,YAAM,OAAO,CAAC;AACd,iBAAW,WAAW,SAAS;AAC7B,cAAM,aAAa,cAAc,OAAO,GAClC,YAAYC,MAAK,KAAK,WAAW,UAAU;AACjD,UAAI,MAAM,OAAO,SAAS,IACxB,KAAK,KAAK,OAAO,KAEjB,mBACA,OAAO,MAAM,yBAAyB,UAAU,2BAA2B;AAAA,QAE/E;AAEA,QAAI,KAAK,WAAW,IAClB,OAAO,IAAI,MAAM,SAAS,IACjB,KAAK,SAAS,QAAQ,WAC/B,IAAI,MAAM,SAAS,IAAI;AAAA,MAE3B;AAGF,SAAO,EAAE,QAAQ,kBAAkB,GAAG,gBAAgB;AACxD;AAQA,eAAsB,oBACpB,WACA,OACA,WACA,aACqB;AACrB,MAAI;AACF,QAAM,SAAS,MAAM,gBAAgB,WAAW,OAAO,WAAW;AAClE,WAAI,OAAO,UACT,MAAM,oBAAoB,EAAE,MAAM,WAAW,MAAM,CAAC,GAE/C;AAAA,EACT,SAAS,OAAO;AACd,kBAAO,KAAK,yBAAyB,KAAK,EAAE,GACrC,EAAE,QAAQ,IAAO,iBAAiB,EAAE;AAAA,EAC7C;AACF;;;ACtDO,SAAS,0BAA0B,OAAuB,aAA2B;AAC1F,EAAI,MAAM,YAAY,MAAM,SAAS,WAAW,KAC9C,OAAO,MAAM,SAAS,WAAW;AAErC;AAEO,SAAS,6BACd,OACA,aACA,oBACM;AACN,MAAM,MAAM,MAAM,WAAW,WAAW;AACxC,MAAK,KAEL;AAAA,aAAW,OAAO;AAChB,aAAO,IAAI,MAAM,GAAG;AAGtB,IA
AI,OAAO,KAAK,IAAI,KAAK,EAAE,WAAW,KACpC,OAAO,MAAM,SAAS,WAAW;AAAA;AAErC;;;AC3DA,SAAS,QAAAC,cAAY;AAQrB,SAAS,oBAAoB,SAAiB,aAA4D;AACxG,MAAI,CAAC,QAAS,QAAO,EAAE,SAAS,IAAO,QAAQ;AAC/C,MAAM,SAAS,qBAAqB,WAAW,GACzC,UAAU,oBACV,YAAY,OAAO,KAAK,OAAO;AACrC,MAAI,CAAC,UAAW,QAAO,EAAE,SAAS,IAAO,QAAQ;AACjD,MAAM,SAAS,QAAQ,MAAM,GAAG,UAAU,KAAK,GACzC,OAAO,QAAQ,MAAM,UAAU,QAAQ,UAAU,CAAC,EAAE,MAAM,GAC1D,aAAa,QAAQ,KAAK,IAAI;AACpC,MAAI,CAAC,WAAY,QAAO,EAAE,SAAS,IAAO,QAAQ;AAClD,MAAM,QAAQ,KAAK,MAAM,WAAW,QAAQ,WAAW,CAAC,EAAE,MAAM;AAChE,SAAO,EAAE,SAAS,IAAM,SAAS,SAAS,MAAM;AAClD;AAGA,SAAS,6BAA6B,SAAiB,cAA+D;AACpH,MAAI,UAAU,IACV,UAAU;AACd,WAAW,QAAQ,cAAc;AAC/B,QAAM,SAAS,oBAAoB,SAAS,IAAI;AAChD,IAAI,OAAO,YAAS,UAAU,KAC9B,UAAU,OAAO;AAAA,EACnB;AACA,SAAO,EAAE,SAAS,SAAS,QAAQ;AACrC;AAGA,SAAS,yBAAmC;AAC1C,MAAM,MAAM,oBAAI,IAAY,CAAC,cAAc,SAAS,CAAC;AACrD,WAAW,YAAY,gBAAgB,GAAG;AACxC,QAAM,MAAM,sBAAsB,QAAQ;AAC1C,IAAI,IAAI,YAAU,IAAI,IAAI,IAAI,QAAQ;AAAA,EACxC;AACA,SAAO,MAAM,KAAK,GAAG;AACvB;AAOA,eAAsB,wBACpB,WACA,cACA,UAAgC,CAAC,GACD;AAChC,MAAM,UAAoB,CAAC,GACrB,YAAY,uBAAuB;AAEzC,WAAW,YAAY,WAAW;AAChC,QAAM,UAAUC,OAAK,WAAW,QAAQ;AACxC,QAAI,CAAE,MAAM,OAAO,OAAO,EAAI;AAE9B,QAAM,WAAW,MAAM,aAAa,OAAO,GACrC,EAAE,SAAS,QAAQ,IAAI,6BAA6B,UAAU,YAAY;AAChF,IAAK,YAEA,QAAQ,WACX,MAAM,cAAc,SAAS,OAAO,GACpC,OAAO,MAAM,sBAAsB,OAAO,EAAE,IAG9C,QAAQ,KAAK,QAAQ;AAAA,EACvB;AAEA,SAAO,EAAE,QAAQ;AACnB;;;AH7CA,SAAS,cAAc,KAAa,WAAiC;AACnE,MAAM,aAAa,IAAI,QAAQ,OAAO,GAAG;AACzC,SAAO,UAAU,IAAI,UAAU;AACjC;AAEA,eAAe,oBACb,cACA,WACA,aACA,WACA,UAAsC,CAAC,GACH;AACpC,MAAM,UAAoB,CAAC,GACrB,UAAoB,CAAC,GACrB,YAAY,oBAAI,IAAY;AAElC,WAAW,CAAC,QAAQ,QAAQ,KAAK,OAAO,QAAQ,gBAAgB,CAAC,CAAC,GAAG;AACnE,QAAI,CAAC,MAAM,QAAQ,QAAQ,KAAK,SAAS,WAAW,EAAG;AAIvD,QAFc,SAAS,MAAM,GAElB;AACT,eAAW,WAAW,UAAU;AAC9B,YAAM,aAAa,cAAc,OAAO,GAClC,SAASC,MAAK,KAAK,WAAW,UAAU;AAC9C,YAAM,MAAM,OAAO,MAAM;AAEzB,cAAI,QAAQ;AACV,2BAAiB,YAAY,UAAU,MAAM;AAC3C,cAAK,UAAU,IAAI,QAAQ,MACzB,UAAU,IAAI,QAAQ,GACtB,QAAQ,KAAK,QAAQ;AAAA,eAGpB;AACL,gBAAM,SAAS,MAAM,kBAAkB,WAAW,SAAS,WAAW;AACtE,oBAAQ,KAAK,GAAG,OAAO,OAAO,GAC9B,QAAQ,KAAK,
GAAG,OAAO,OAAO;AAAA,UAChC;AAAA,MACF;AACA;AAAA,IACF;AAEA,QAAI,eAAc,QAAQ,SAAS;AAInC,eAAW,WAAW,UAAU;AAC9B,YAAM,aAAa,cAAc,OAAO,GAClC,UAAUA,MAAK,KAAK,WAAW,UAAU;AAE/C,YAAI,QAAQ;AACV,UAAK,UAAU,IAAI,OAAO,MACxB,UAAU,IAAI,OAAO,GACrB,QAAQ,KAAK,OAAO;AAAA,aAEjB;AACL,cAAM,SAAS,MAAM,kBAAkB,WAAW,SAAS,WAAW;AACtE,kBAAQ,KAAK,GAAG,OAAO,OAAO,GAC9B,QAAQ,KAAK,GAAG,OAAO,OAAO;AAAA,QAChC;AAAA,MACF;AAAA,EACF;AAEA,SAAO,EAAE,SAAS,QAAQ;AAC5B;AAOA,eAAsB,qBACpB,aACA,UAA4B,CAAC,GAC7B,aACiD;AAEjD,MAAM,YAAY,YAAY,WACxB,aAAa,uBAAuB,SAAS,GAC7C,eAAe,uBAAuB,SAAS;AAErD,MAAI,CAAE,MAAM,OAAO,UAAU,KAAM,CAAE,MAAM,OAAO,YAAY;AAC5D,UAAM,IAAI;AAAA,MACR,4CAA4C,SAAS;AAAA,IACvD;AAIF,MAAM,EAAE,OAAO,MAAM,UAAU,IAAI,MAAM,mBAAmB,SAAS;AAGrE,QAAM,oBAAoB,WAAW,OAAO,SAAS;AAErD,MAAM,QAAQ,mBAAmB,aAAa,MAAM,YAAY,CAAC,CAAC;AAElE,MAAI,CAAC;AACH,WAAO,EAAE,SAAS,IAAO,OAAO,YAAY,WAAW,kCAAkC;AAG3F,MAAM,eAAe,MAAM,KACrB,WAAW,MAAM,OAGjB,YAAY,oBAAoB,MAAM,YAAY,CAAC,GAAG,YAAY;AAExE,MAAI,UAAU,SAAS,KAAK,CAAC,QAAQ,WAAW;AAE9C,QAAM,YAAY,UAAU,IAAI,OAAK,OAAO,CAAC,EAAE,EAAE,KAAK;AAAA,CAAI;AAC1D,WAAO;AAAA,MACL,YAAY,YAAY,SAAS,UAAU,MAAM;AAAA,EAA0B,SAAS;AAAA;AAAA,IAEtF;AAAA,EACF;AAEA,MAAI,QAAQ,aAAa,UAAU,SAAS,GAAG;AAC7C,QAAMC,aAAY,yBAAyB,gBAAgB,QAAW,SAAS,GAAG,SAAS;AAE3F,aAAW,YAAY,UAAU,QAAQ,GAAG;AAC1C,UAAM,aAAa,mBAAmB,UAAU,MAAM,YAAY,CAAC,CAAC;AACpE,UAAI,CAAC,WAAY;AAEjB,UAAM,EAAE,SAAS,aAAa,IAAI,MAAM;AAAA,QACtC,WAAW,MAAM,SAAS,CAAC;AAAA,QAC3B;AAAA,QACA,WAAW;AAAA,QACXA;AAAA,QACA,EAAE,QAAQ,GAAM;AAAA,MAClB;AAEA,YAAM,wBAAwB,WAAW,CAAC,WAAW,GAAG,CAAC,GACzD,0BAA0B,OAAO,WAAW,GAAG,GAC/C,MAAM,oBAAoB,WAAW,WAAW,GAAG,GACnD,MAAM,gCAAgC,WAAW,WAAW,GAAG,GAE/D,OAAO,KAAK,gCAAgC,WAAW,GAAG,aAAa,aAAa,MAAM,QAAQ;AAAA,IACpG;AAAA,EACF;AAEA,MAAM,YAAY,yBAAyB,gBAAgB,QAAW,SAAS,GAAG,SAAS;AAE3F,MAAI,QAAQ,QAAQ;AAClB,QAAM,MAAM,cAAc,WAAW,GAC/B,kBAAkB,MAAM;AAAA,MAC5B,SAAS,SAAS,CAAC;AAAA,MACnB;AAAA,MACA;AAAA,MACA;AAAA,MACA,EAAE,QAAQ,GAAK;AAAA,IACjB,GACM,WAAW,MAAM,wBAAwB,WAAW,CAAC,YAAY,GAAG,EAAE,QAAQ,GAAK,CAAC;AAC1F,QAAI,KAAK,0BAA0B,gBAAgB,QAAQ,MAAM,cAAc,YAAY,EAAE;AAC7F,aAAW,YAAY,gBAAgB;AACrC,UAAI,K
AAK,MAAM,QAAQ,EAAE;AAE3B,WAAI,SAAS,QAAQ,SAAS,MAC5B,IAAI,KAAK,uBAAuB,GAChC,SAAS,QAAQ,QAAQ,OAAK,IAAI,KAAK,MAAM,CAAC,EAAE,CAAC,IAE5C;AAAA,MACL,SAAS;AAAA,MACT,MAAM;AAAA,QACJ,cAAc,gBAAgB;AAAA,QAC9B,kBAAkB,SAAS;AAAA,MAC7B;AAAA,IACF;AAAA,EACF;AAEA,MAAM,EAAE,SAAS,SAAS,QAAQ,IAAI,MAAM;AAAA,IAC1C,SAAS,SAAS,CAAC;AAAA,IACnB;AAAA,IACA;AAAA,IACA;AAAA,IACA,EAAE,QAAQ,GAAM;AAAA,EAClB,GAEM,aAAa,MAAM,wBAAwB,WAAW,CAAC,YAAY,CAAC;AAG1E,4BAA0B,OAAO,YAAY,GAC7C,MAAM,oBAAoB,EAAE,MAAM,WAAW,MAAM,CAAC,GAGpD,MAAM,oBAAoB,WAAW,YAAY,GAGjD,MAAM,gCAAgC,WAAW,YAAY;AAG7D,MAAM,gBAAgB,6BAA6B,SAAS,GAEtD,uBAAuB,QAAQ,IAAI,kBAAgBD,MAAK,KAAK,WAAW,YAAY,CAAC;AAC3F,eAAM,oBAAoB,WAAW,sBAAsB,aAAa,GAExE,OAAO,KAAK,eAAe,YAAY,aAAa,QAAQ,MAAM,mBAAmB,QAAQ,MAAM,eAAe,GAE3G;AAAA,IACL,SAAS;AAAA,IACT,MAAM;AAAA,MACJ,cAAc;AAAA,MACd,kBAAkB,CAAC,GAAG,WAAW,SAAS,GAAG,OAAO;AAAA,IACtD;AAAA,EACF;AACF;AAEA,eAAsB,8BACpB,aACA,oBACA,UAA4B,CAAC,GAC7B,aACiD;AACjD,MAAM,YAAY,YAAY,WACxB,aAAa,uBAAuB,SAAS,GAC7C,eAAe,uBAAuB,SAAS;AAErD,MAAI,CAAE,MAAM,OAAO,UAAU,KAAM,CAAE,MAAM,OAAO,YAAY;AAC5D,UAAM,IAAI;AAAA,MACR,4CAA4C,SAAS;AAAA,IACvD;AAGF,MAAM,EAAE,OAAO,MAAM,UAAU,IAAI,MAAM,mBAAmB,SAAS,GAC/D,QAAQ,mBAAmB,aAAa,MAAM,YAAY,CAAC,CAAC;AAElE,MAAI,CAAC;AACH,WAAO,EAAE,SAAS,IAAO,OAAO,YAAY,WAAW,kCAAkC;AAG3F,MAAM,eAAe,MAAM,KACrB,WAAW,MAAM,OAEjB,gBAAwE,CAAC;AAC/E,WAAW,OAAO;AAChB,IAAI,SAAS,MAAM,GAAG,MACpB,cAAc,GAAG,IAAI,SAAS,MAAM,GAAG;AAI3C,MAAM,YAAY,yBAAyB,gBAAgB,QAAW,SAAS,GAAG,SAAS;AAE3F,MAAI,QAAQ,QAAQ;AAClB,QAAM,MAAM,cAAc,WAAW,GAC/B,kBAAkB,MAAM;AAAA,MAC5B;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,EAAE,QAAQ,GAAK;AAAA,IACjB;AACA,QAAI,KAAK,0BAA0B,gBAAgB,QAAQ,MAAM,cAAc,YAAY,EAAE;AAC7F,aAAW,YAAY,gBAAgB;AACrC,UAAI,KAAK,MAAM,QAAQ,EAAE;AAE3B,WAAO;AAAA,MACL,SAAS;AAAA,MACT,MAAM;AAAA,QACJ,cAAc,gBAAgB;AAAA,QAC9B,kBAAkB,CAAC;AAAA,MACrB;AAAA,IACF;AAAA,EACF;AAEA,MAAM,EAAE,SAAS,SAAS,QAAQ,IAAI,MAAM;AAAA,IAC1C;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,EAAE,QAAQ,GAAM;AAAA,EAClB;AAEA,+BAA6B,OAAO,cAAc,kBAAkB,GACpE,MAAM,oBAAoB,EAAE,MAAM,WAAW,MAAM,CAAC;AAGpD,MAAM,eAAe,MAAM,SAAS,YAAY;AAChD,G
AAI,CAAC,gBAAgB,OAAO,KAAK,aAAa,SAAS,CAAC,CAAC,EAAE,WAAW,MACpE,MAAM,oBAAoB,WAAW,YAAY;AAGnD,MAAM,gBAAgB,6BAA6B,SAAS,GACtD,uBAAuB,QAAQ,IAAI,kBAAgBA,MAAK,KAAK,WAAW,YAAY,CAAC;AAC3F,eAAM,oBAAoB,WAAW,sBAAsB,aAAa,GAExE,OAAO,KAAK,gCAAgC,YAAY,aAAa,QAAQ,MAAM,mBAAmB,QAAQ,MAAM,eAAe,GAE5H;AAAA,IACL,SAAS;AAAA,IACT,MAAM;AAAA,MACJ,cAAc;AAAA,MACd,kBAAkB;AAAA,IACpB;AAAA,EACF;AACF;;;AI1QO,SAAS,sBAAsB,QAO7B;AACP,MAAM,iBAAiB,qBAAqB,OAAO,WAAW;AAE9D,MAAI,OAAO,SAAS,SAAS,OAAO,QAAQ;AAC1C,QAAM,gBAAgB,gBAAgB,OAAO,MAAM,GAG7C,UAAU,eAAe,MAAM,oBAAoB,GACnD,kBAAkB,UAAU,MAAM,QAAQ,CAAC,CAAC,KAAK,gBACjD,mBAAmB,EAAQ,OAAO,gBACrC,mBAAmB;AAEtB,WAAO;AAAA,MACL,WAAW;AAAA,MACX;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAEA,MAAI,OAAO,SAAS,UAAU,OAAO,WAAW;AAG9C,QAAM,mBAAmB,EAAQ,OAAO;AACxC,WAAO;AAAA,MACL,WAAW,QAAQ,OAAO,SAAS;AAAA,MACnC,iBAAiB;AAAA,MACjB;AAAA,IACF;AAAA,EACF;AAEA,MAAI,OAAO,SAAS,YAAY;AAE9B,QAAM,mBAAmB,GAAQ,OAAO,gBAAgB,OAAO;AAC/D,WAAO;AAAA,MACL,WAAW,YAAY,cAAc;AAAA,MACrC,iBAAiB;AAAA,MACjB;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AAYA,SAAS,gBACP,cACA,eACA,WACA,iBACA,YACS;AACT,MAAM,yBAAyB,qBAAqB,YAAY,GAC1D,iBAAiB,qBAAqB,eAAe;AAE3D,SAAI,eAAe,QAGb,0BAAuB,WAAW,iBAAiB,GAAG,KACtD,2BAA2B,kBAI3B,eAAe,WAAW,yBAAyB,GAAG,KACtD,mBAAmB,0BAMrB,eAAe,UAOf,eAAe,aACV,2BAA2B,kBAChC,uBAAuB,WAAW,iBAAiB,GAAG,KACtD,eAAe,WAAW,yBAAyB,GAAG,IAGnD;AACT;AAmBA,eAAsB,iBACpB,QACA,WAC4B;AAC5B,MAAM,WAAW,sBAAsB,MAAM;AAC7C,MAAI,CAAC;AACH,WAAO,EAAE,MAAM,OAAO;AAGxB,MAAM,EAAE,WAAW,gBAAgB,IAAI,UAIjC,YADW,MAAM,mBAAmB,SAAS,GACzB,MAAM,YAAY,CAAC;AAE7C,MAAI,OAAO,KAAK,QAAQ,EAAE,WAAW;AACnC,WAAO,EAAE,MAAM,OAAO;AAGxB,MAAM,qBAAqB,qBAAqB,OAAO,WAAW,GAO5D,kBAAmC,CAAC;AAE1C,WAAW,gBAAgB,OAAO,KAAK,QAAQ;AAC7C,IAAI,qBAAqB,YAAY,MAAM,sBAKvC,gBAAgB,cAAc,SAAS,YAAY,GAAG,WAAW,iBAAiB,OAAO,IAAI,KACpE,qBAAqB,YAAY,EACrC,WAAW,qBAAqB,GAAG,KACxD,gBAAgB,KAAK,EAAE,aAAa,aAAa,CAAC;AAKxD,MAAI,gBAAgB,SAAS;AAC3B,WAAO,EAAE,MAAM,WAAW,gBAAgB;AAQ5C,WAAW,gBAAgB,OAAO,KAAK,QAAQ,GAAG;AAChD,QAAI,CAAC,gBAAgB,cAAc,SAAS,YAAY,GAAG,WAAW,iBAAiB,OAAO,IAAI;AAChG;AAGF,QAAM,qBAAqB,qBAAqB,YAAY;AAC5D,QAAI,mBAAmB,WAAW,qBA
AqB,GAAG;AACxD,aAAO;AAAA,QACL,MAAM;AAAA,QACN,iBAAiB;AAAA,MACnB;AAAA,EAEJ;AAEA,SAAO,EAAE,MAAM,OAAO;AACxB;AAgBA,eAAsB,mBACpB,QACA,aACe;AACf,WAAW,SAAS,OAAO,iBAAiB;AAC1C,QAAM,kBAAkB,MAAM,qBAAqB,MAAM,aAAa,CAAC,GAAG,WAAW;AAErF,QAAI,CAAC,gBAAgB;AACnB,YAAM,IAAI;AAAA,QACR,qCAAqC,MAAM,WAAW,KAAK,gBAAgB,SAAS,eAAe;AAAA,MACrG;AAAA,EAEJ;AACF;;;ACzQA,SAAS,QAAAE,cAAY;;;ACsBrB,eAAsB,oCACpB,aACA,OACA,QAC4B;AAC5B,MAAM,YAAY,MAAM,kBAAkB,IAAI,MAAM,aAAa,IAE3D,SAAS,MAAM,OAAO;AAAA,IAC1B,wBAAwB,WAAW,eAAe,MAAM,aAAa,0BAA0B,MAAM,aAAa;AAAA,IAClH;AAAA,MACE,EAAE,OAAO,4BAA4B,SAAS,IAAI,OAAO,SAAS;AAAA,MAClE,EAAE,OAAO,gCAAgC,OAAO,SAAS;AAAA,MACzD,EAAE,OAAO,qBAAqB,OAAO,OAAO;AAAA,IAC9C;AAAA,EACF;AAEA,SAAI,WAAW,WACN,EAAE,QAAQ,UAAU,UAAU,UAAU,IAG7C,WAAW,WAKN,EAAE,QAAQ,UAAU,UAJP,MAAM,OAAO,KAAK,wBAAwB;AAAA,IAC5D,aAAa;AAAA,IACb,SAAS;AAAA,EACX,CAAC,EACgD,IAG5C,EAAE,QAAQ,OAAO;AAC1B;;;ADHA,eAAsB,yBAAyB,aAAkD;AAC/F,MAAM,aAAa;AAAA,IACjBC,OAAK,aAAa,iBAAiB;AAAA,IACnCA,OAAK,aAAa,gBAAgB,iBAAiB;AAAA,EACrD;AAEA,WAAW,aAAa;AACtB,QAAI;AAEF,cADY,MAAM,gBAAgB,SAAS,GAChC;AAAA,IACb,QAAQ;AAAA,IAER;AAIJ;AAWA,eAAsB,+BACpB,KACA,aAC6B;AAC7B,MAAM,eAAe,uBAAuB,GAAG;AAE/C,MAAI;AACF,QAAM,SAAS,MAAM,gBAAgB,YAAY,GAC3C,aAAa,qBAAqB,WAAW,GAE7C,UAAU;AAAA,MACd,GAAI,OAAO,gBAAgB,CAAC;AAAA,MAC5B,GAAI,OAAO,kBAAkB,KAAK,CAAC;AAAA,IACrC;AAEA,aAAW,OAAO;AAChB,UAAI,0BAA0B,IAAI,MAAM,UAAU;AAChD,eAAO,IAAI;AAIf;AAAA,EACF,QAAQ;AACN;AAAA,EACF;AACF;AAWO,SAAS,uBACd,eACA,eACoB;AAEpB,MAAI,CAAC,iBAAiB,qBAAqB,aAAa;AACtD,WAAO,EAAE,QAAQ,iBAAiB,eAAe,cAAc;AAIjE,MAAI,CAAC;AACH,WAAO,EAAE,QAAQ,iBAAiB,eAAe,cAAc;AAIjE,MAAI,iBAAiB,eAAe,aAAa;AAC/C,WAAO,EAAE,QAAQ,aAAa,eAAe,cAAc;AAI7D,MAAM,cAAc,mBAAmB,aAAa,GAC9C,iBAAiB,iBAAiB,WAAW;AAEnD,SAAO;AAAA,IACL,QAAQ;AAAA,IACR;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;AAeA,eAAsB,uBACpB,aACA,OACA,SACA,QACA,aACsE;AAEtE,MAAI,QAAQ;AACV,WAAO,EAAE,QAAQ,UAAU,UAAU,QAAQ,gBAAgB;AAI/D,MAAI,QAAQ,OAAO;AACjB,QAAM,cAAc,mBAAmB,MAAM,aAAc;AAE3D,WAAO,EAAE,QAAQ,UAAU,UADV,iBAAiB,WAAW,EACT;AAAA,EACtC;AAGA,MAAI;AACF,WAAO,MAAM,oCAAoC,aAAa,OAAO,MAAM;AAAA,EAC7E,SAAS,OAAO;AAE
d,QAAI,iBAAiB,2BAA2B;AAC9C,UAAM,YAAY,MAAM,kBAAkB,IAAI,MAAM,aAAa;AACjE,YAAM,IAAI;AAAA,QACR,6BAA6B,WAAW,WAAW,MAAM,aAAa,0BAC7C,MAAM,aAAa;AAAA;AAAA;AAAA,SAElC,WAAW,IAAI,WAAW,IAAI,SAAS;AAAA,SACvC,WAAW;AAAA;AAAA,MACvB;AAAA,IACF;AACA,UAAM;AAAA,EACR;AACF;AAUA,eAAsB,oBACpB,KACA,aACA,eACA,UACe;AACf,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA;AAAA;AAAA,IACY;AAAA;AAAA,IACU;AAAA;AAAA,IACT;AAAA,EACf;AACF;AAUA,eAAsB,mBACpB,KACA,aACA,YACe;AACf,MAAI;AACF,QAAM,SAAS,MAAM,mBAAmB,GAAG,GACrC,MAAM,OAAO,MAAM,WAAW,WAAW;AAC/C,QAAI,CAAC,IAAK;AAEV,QAAI,UAAU,YACd,MAAM,oBAAoB,MAAM,GAChC,OAAO,MAAM,uCAAuC,WAAW,OAAO,UAAU,EAAE;AAGlF,QAAI;AACF,UAAM,aAAa,MAAM,aAAa,GAAG,GACnC,UAAU,WAAW,SAAS,SAAS,WAAW;AACxD,MAAI,YACF,QAAQ,UAAU,YAClB,MAAM,cAAc,UAAU;AAAA,IAElC,QAAQ;AAAA,IAAuC;AAAA,EACjD,SAAS,OAAO;AACd,WAAO,KAAK,mCAAmC,KAAK,EAAE;AAAA,EACxD;AACF;;;AE7OA,SAAS,8BAA8B,KAAgC;AACrE,MAAI,CAAC,IAAI,OAAO;AACd,UAAM,IAAI,MAAM,uDAAuD;AAEzE,MAAI,CAAC,IAAI,OAAO;AACd,UAAM,IAAI,MAAM,+EAA+E;AAEjG,MAAI,CAAC,IAAI,OAAO;AACd,UAAM,IAAI,MAAM,+EAA+E;AAEjG,MAAI,CAAC,MAAM,QAAQ,IAAI,gBAAgB,KAAK,IAAI,iBAAiB,WAAW;AAC1E,UAAM,IAAI,MAAM,6FAA6F;AAE/G,MAAI,CAAC,IAAI,iBAAiB,KAAK,OAAM,EAAU,MAAM;AACnD,UAAM,IAAI,MAAM,+EAA+E;AAEnG;AAeA,eAAsB,iBAAiB,KAAuD;AAC5F,MAAI,IAAI,SAAS,SAAS,IAAI;AAC5B,WAAO;AAGT,MAAM,SAAS,MAAM,iBAAiB,IAAI,QAAQ,IAAI,SAAS;AAE/D,UAAQ,OAAO,MAAM;AAAA,IACnB,KAAK;AACH,mBAAM,mBAAmB,QAAQ,IAAI,SAAS,GAC9C,IAAI,qBAAqB,OAAO,gBAAgB,IAAI,OAAK,EAAE,WAAW,GAC/D;AAAA,IAET,KAAK,mBAAmB;AACtB,UAAM,MAAM,cAAc,IAAI,SAAS,GACjC,eAAe,IAAI,OAAO,gBAC9B,IAAI,OAAO,YAAY,QAAQ,wBAAwB,EAAE;AAC3D,iBAAI,KAAK,YAAY,YAAY,6BAA6B,OAAO,eAAe,EAAE,GAC/E;AAAA,IACT;AAAA,IAEA,KAAK;AAAA,IACL;AACE,aAAO;AAAA,EACX;AACF;AAWA,eAAsB,0BACpB,KACwB;AACxB,SAAO,KAAK,0CAA0C;AAAA,IACpD,MAAM,IAAI;AAAA,IACV,YAAY,IAAI,OAAO;AAAA,IACvB,aAAa,IAAI,OAAO;AAAA,EAC1B,CAAC;AAED,MAAM,MAAM,cAAc,IAAI,SAAS,GACjC,cAAc,IAAI,OAAO,eAAe,WAC1C,qBAAoC,MAIpC;AACJ,MAAI;AAoBF,QAnBA,WAAW,IAAI,QAAQ,GACvB,SAAS,MAAM,cAAc,WAAW,EAAE,GAItC,IAAI,SAAS,aACf,MAAM,0BAA0B,IAAI,SAAS,GAK/C,MAAM,iBAAiB,KAAK,QAAW,QAAQ,GAG/C,8BAA8B,GAAG
,GAIN,MAAM,iBAAiB,GAAG,MAC1B;AACzB,sBAAS,KAAK,GACP,2BAA2B,GAAG;AAKvC,KAAI,IAAI,OAAO,SAAS,UAAU,IAAI,OAAO,SAAS,iBACpD,SAAS,KAAK,GACd,MAAM,uBAAuB,GAAG,GAChC,WAAW,IAAI,QAAQ,GACvB,SAAS,MAAM,aAAa,IAAI,OAAO,WAAW,EAAE,IAKtD,SAAS,QAAQ,aAAa,IAAI,OAAO,WAAW,EAAE,GACtD,MAAM,aAAa,GAAG,GAEtB,qBAAqB,IAAI,uBAAuB,MAEhD,SAAS,KAAK,YAAY,IAAI,OAAO,WAAW,EAAE;AAAA,EACpD,SAAS,OAAO;AACd,oBAAU,KAAK,GAEf,MAAM,qBAAqB,IAAI,uBAAuB,IAAI,GACpD;AAAA,EACR;AAKA,MAAI;AACJ,MAAI;AAGF,QAAI,CADkB,MAAM,sBAAsB,GAAG;AAEnD,aAAO,yBAAyB,GAAG;AAIrC,eAAW,IAAI,QAAQ,GACvB,SAAS,MAAM,cAAc,IAAI,OAAO,WAAW,EAAE;AAGrD,QAAM,gBAAgB,MAAM,yBAAyB,GAAG;AAGxD,WAAI,cAAc,aAAa,CAAC,cAAc,qBAC5C,SAAS,KAAK,GACP;AAAA,MACL,SAAS;AAAA,MACT,OAAO,qBAAqB,IAAI,OAAO,WAAW,KAAK,IAAI,OAAO,KAAK,IAAI,CAAC;AAAA,IAC9E,MAIE,qBAAqB,GAAG,KAC1B,MAAM,oBAAoB,GAAG,GAG/B,SAAS,KAAK,aAAa,IAAI,OAAO,WAAW,EAAE,GAG5C,MAAM,mBAAmB,KAAK,aAAa;AAAA,EAEpD,SAAS,OAAO;AACd,cAAU,KAAK,GAEf,OAAO,MAAM,uBAAuB,IAAI,OAAO,WAAW,KAAK,KAAK;AAEpE,QAAM,eAAe,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAC1E,eAAI,MAAM,qBAAqB,IAAI,OAAO,WAAW,KAAK,YAAY,EAAE,GAEjE;AAAA,MACL,SAAS;AAAA,MACT,OAAO;AAAA,MACP,UAAU,IAAI,SAAS,SAAS,IAAI,IAAI,WAAW;AAAA,IACrD;AAAA,EACF,UAAE;AACA,UAAM,qBAAqB,kBAAkB;AAAA,EAC/C;AACF;AAKA,SAAS,yBAAyB,KAAyC;AAEzE,SADY,cAAc,IAAI,SAAS,EACnC,KAAK,gCAAgC,GAElC;AAAA,IACL,SAAS;AAAA,IACT,MAAM;AAAA,MACJ,aAAa,IAAI,OAAO;AAAA,MACxB,WAAW;AAAA,MACX,SAAS;AAAA,MACT,eAAe;AAAA,IACjB;AAAA,EACF;AACF;AAKA,SAAS,2BAA2B,KAAyC;AAC3E,SAAO;AAAA,IACL,SAAS;AAAA,IACT,MAAM;AAAA,MACJ,aAAa,IAAI,OAAO;AAAA,MACxB,WAAW;AAAA,MACX,SAAS;AAAA,MACT,QAAQ;AAAA,IACV;AAAA,EACF;AACF;AAQA,eAAe,uBAAuB,KAAyC;AAC7E,MAAM,gBAAgB,IAAI,OAAO;AACjC,MAAI,CAAC,cAAe;AAEpB,MAAM,gBAAgB,MAAM;AAAA,IAC1B,IAAI;AAAA,IAAW,IAAI,OAAO;AAAA,EAC5B,GACM,QAAQ,uBAAuB,eAAe,aAAa;AAEjE,MAAI,MAAM,WAAW,WAAY;AAEjC,MAAM,SAAS,cAAc,IAAI,SAAS,GACpC,aAAa,MAAM;AAAA,IACvB,IAAI,OAAO;AAAA,IAAa;AAAA,IAAO,EAAE,OAAO,IAAI,SAAS,MAAM;AAAA,IAAG;AAAA,IAAQ;AAAA,EACxE;AAEA,MAAI,WAAW,WAAW;AACxB,UAAM,IAAI,MAAM,gDAAgD,IAAI,OAAO,WAAW,EAAE;AAG1F,QAAM,oBAAoB,IAAI,WAAW,IAAI,OAAO,aAAa,eAAe,W
AAW,QAAQ;AACrG;;;AC3PA,eAAsB,wBACpB,UACA,SACwB;AACxB,MAAI,SAAS,WAAW;AACtB,WAAO,EAAE,SAAS,IAAM,MAAM,EAAE,WAAW,GAAG,SAAS,GAAG,SAAS,CAAC,EAAE,EAAE;AAG1E,MAAM,EAAE,aAAa,wBAAwB,SAAS,IAAI,WAAW,CAAC,GAChE,MAAM,cAAc,SAAS,CAAC,EAAE,SAAS,GAIzC,EAAE,QAAQ,gBAAgB,cAAc,WAAW,IACvD,MAAM,uBAAuB,UAAU,GAAG;AAE5C,MAAI,eAAe,WAAW;AAC5B,WAAO;AAAA,MACL,SAAS;AAAA,MACT,MAAM;AAAA,QACJ,WAAW;AAAA,QACX,SAAS;AAAA,QACT,SAAS,CAAC;AAAA,QACV,QAAQ;AAAA,MACV;AAAA,IACF;AAGF,MAAI,YAAY,GACZ,UAAU,YACV,SAAS,GACP,UAAqE,CAAC,GACtE,iBAAsC,CAAC;AAE7C,WAAW,OAAO,gBAAgB;AAChC,IAAI,gBACF,IAAI,kBAAkB;AAGxB,QAAM,SAAS,MAAM,0BAA0B,GAAG,GAC5C,OAAQ,OAAO,MAAc,eAAe,IAAI,OAAO,eAAe;AAE5E,QAAI,OAAO,SAAS;AAClB,mBAAc,OAAO,MAAc,aAAa,GAChD,WAAY,OAAO,MAAc,WAAW;AAC5C,UAAM,aAAc,OAAe;AACnC,MAAI,cACF,eAAe,KAAK,UAAU;AAAA,IAElC,WACE,UAAU,GACN,UAAU;AAEZ,cAAQ,KAAK;AAAA,QACX;AAAA,QACA,SAAS;AAAA,QACT,OAAO,OAAO;AAAA,MAChB,CAAC;AACD;AAAA,IACF;AAGF,YAAQ,KAAK;AAAA,MACX;AAAA,MACA,SAAS,OAAO;AAAA,MAChB,OAAO,OAAO,UAAU,SAAY,OAAO;AAAA,IAC7C,CAAC;AAAA,EACH;AAEA,MAAI,eAAe,eAAe,SAAS,GAAG;AAC5C,QAAM,SAAS,uBAAuB,gBAAgB;AAAA,MACpD,aAAa,0BAA0B,SAAS,CAAC,EAAE,OAAO;AAAA,MAC1D,gBAAgB;AAAA,IAClB,CAAC,GACK,SAAS,cAAc,SAAS,CAAC,EAAE,SAAS;AAClD,+BAA2B,QAAQ,MAAM;AAAA,EAC3C;AAEA,MAAM,UAAU,WAAW;AAC3B,SAAO;AAAA,IACL;AAAA,IACA,MAAM;AAAA,MACJ;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,IACA,OAAO,UAAU,SAAY,GAAG,MAAM,YAAY,WAAW,IAAI,KAAK,GAAG;AAAA,EAC3E;AACF;AAUA,eAAsB,uBACpB,UACA,KACkE;AAClE,MAAM,SAAgC,CAAC,GACnC,eAAe;AAEnB,WAAW,OAAO,UAAU;AAC1B,QAAI,IAAI,SAAS,OAAO;AACtB,UAAI,sBAAsB,IAC1B,OAAO,KAAK,GAAG;AACf;AAAA,IACF;AAEA,QAAM,SAAS,MAAM,iBAAiB,IAAI,QAAQ,IAAI,SAAS;AAC/D,QAAI,OAAO,SAAS,mBAAmB;AACrC,UAAM,eAAe,IAAI,OAAO,gBAC9B,IAAI,OAAO,YAAY,QAAQ,wBAAwB,EAAE;AAC3D,UAAI,KAAK,YAAY,YAAY,6BAA6B,OAAO,eAAe,EAAE,GACtF;AAAA,IACF;AACE,UAAI,sBAAsB,IAC1B,OAAO,KAAK,GAAG;AAAA,EAEnB;AAEA,SAAO,EAAE,QAAQ,aAAa;AAChC;AAEO,SAAS,uBACd,MACA,WACmB;AACnB,MAAM,QAAQ,KAAK,CAAC,GACd,iBAAiB,KAAK,QAAQ,OAAK,EAAE,kBAAkB,CAAC,CAAC,GACzD,eAAe,KAAK,QAAQ,OAAK,EAAE,gBAAgB,CAAC,CAAC,GAErD,gBAAgB,KAAK,QAAQ,OAAK,EAAE,iBAAiB
,aAAa,CAAC,CAAC,GACpE,cAAc,KAAK,QAAQ,OAAK,EAAE,iBAAiB,WAAW,CAAC,CAAC,GAChE,cAAc,KAAK,QAAQ,OAAK,EAAE,iBAAiB,WAAW,CAAC,CAAC,GAEhE,aAAa,KAAK,OAAO,CAAC,KAAK,MAAM,OAAO,EAAE,cAAc,IAAI,CAAC,GACjE,SAAS,KAAK,QAAQ,OAAK,EAAE,UAAU,CAAC,CAAC,GACzC,kBAAkB,KAAK,QAAQ,OAAK,EAAE,mBAAmB,CAAC,CAAC,GAC3D,iBAAiB,KAAK,QAAQ,OAAK,EAAE,kBAAkB,CAAC,CAAC,GACzD,eAAe,KAAK,QAAQ,OAAK,EAAE,gBAAgB,CAAC,CAAC,GACrD,oBAAoB,KAAK,QAAQ,OAAK,EAAE,qBAAqB,CAAC,CAAC;AAErE,SAAO;AAAA,IACL,GAAG;AAAA,IACH,aAAa,UAAU,eAAe,MAAM;AAAA,IAC5C;AAAA,IACA;AAAA,IACA,iBAAiB;AAAA,MACf,WAAW;AAAA,MACX,SAAS;AAAA,MACT,SAAS;AAAA,IACX;AAAA,IACA;AAAA,IACA;AAAA,IACA,YAAY,gBAAgB,SAAS,KAAK;AAAA,IAC1C,iBAAiB,gBAAgB,SAAS,IAAI,kBAAkB;AAAA,IAChE;AAAA,IACA,cAAc,aAAa,SAAS,IAAI,eAAe;AAAA,IACvD,mBAAmB,kBAAkB,SAAS,IAAI,oBAAoB;AAAA,IACtE,kBAAkB,MAAM;AAAA,IACxB,gBAAgB,UAAU;AAAA,EAC5B;AACF;;;ACxLA,SAAS,QAAAC,cAAY;;;ACOrB,SAAS,QAAAC,QAAM,YAAAC,YAAU,WAAAC,WAAS,YAAAC,YAAU,WAAAC,UAAS,OAAAC,YAAW;;;ACPhE,SAAS,YAAAC,YAAU,YAAAC,kBAAgB;AAMnC,SAAS,oBAAoB,UAAkB,UAAkB,SAA0B;AAGzF,SAFqBC,WAAS,UAAU,QAAQ,EAAE,QAAQ,OAAO,GAAG,EACtC,MAAM,GAAG,EAAE,OAAO,OAAO,EACvC,MAAM,GAAG,EAAE,EAAE,SAAS,OAAO;AAC/C;AAEA,eAAsB,wBACpB,UACA,cACmB;AACnB,MAAM,aAAa,mBAAmB,YAAY;AAClD,MAAI,CAAC,WAAW,WAAW,CAAC,WAAW;AACrC,WAAO,CAAC;AAGV,MAAM,QAAkB,CAAC;AAEzB,iBAAiB,QAAQ,UAAU,UAAU,CAAC,GAAG,EAAE,aAAa,6BAA6B,CAAC;AAC5F,IAAIC,WAAS,IAAI,MAAM,WAAW,UAI7B,oBAAoB,UAAU,MAAM,WAAW,OAAO,KAI3D,MAAM,KAAK,IAAI;AAGjB,SAAO;AACT;;;ADqEA,eAAsB,wBACpB,UACA,UACA,SACmC;AACnC,MAAM,YAAwC,CAAC,GACzC,SAAmB,CAAC,GACpB,WAAWC,SAAQ,QAAQ,GAC3B,mBAAmBA,SAAQ,QAAQ,GACnC,aAAa,QAAQ,aAAa,IAAI,WAASA,SAAQ,kBAAkB,KAAK,CAAC,KAAK,CAAC,GAErF,YAAY,CAAC,YACb,WAAW,WAAW,IACjB,KAEF,WAAW,KAAK,eACjB,YAAY,YACP,KAEF,QAAQ,WAAW,GAAG,SAAS,GAAGC,IAAG,EAAE,CAC/C,GAGG,iBAAiB,CAAC,YACVC,WAAS,kBAAkB,OAAO,EACnC,QAAQ,OAAO,GAAG,EAAE,QAAQ,UAAU,EAAE;AAIrD,MAAI,QAAQ,UAAU,QAAQ,OAAO,SAAS,GAAG;AAC/C,QAAM,eAAe,MAAM,uBAAuB,UAAU,UAAU,SAAS,QAAQ,MAAM;AAE7F,aAAW,UAAU;AACnB,UAAI,OAAO,SAAS,OAAO,MAAM;AAC/B,YAAM,UAAUF,SAAQ,OAAO,IAAI;AACnC,YAAI,CAAC,UAAU,OAAO,GAAG;AACvB
,iBAAO,KAAK,UAAU,OAAO,IAAI,sCAAsC;AACvE;AAAA,QACF;AACA,YAAM,eAAe,eAAe,OAAO;AAC3C,kBAAU,KAAK;AAAA,UACb,MAAM,OAAO;AAAA,UACb,cAAc;AAAA,UACd;AAAA,UACA,UAAU;AAAA,UACV,cAAc;AAAA,UACd,WAAY,OAAO,aAAa;AAAA,UAChC,iBAAiB,OAAO;AAAA,QAC1B,CAAC;AAAA,MACH,MAAO,CAAI,OAAO,SAChB,OAAO,KAAK,OAAO,KAAK;AAAA,EAG9B;AAGA,MAAI,QAAQ,UAAU,QAAQ,OAAO,SAAS,GAAG;AAC/C,QAAM,eAAe,MAAM,YAAY,UAAU,QAAQ,MAAM;AAE/D,aAAW,UAAU;AACnB,UAAI,OAAO,SAAS,OAAO,QAAQ,OAAO,YAAY;AACpD,YAAM,SAASA,SAAQ,OAAO,UAAU;AACxC,YAAI,CAAC,UAAU,MAAM,GAAG;AACtB,iBAAO,KAAK,UAAU,OAAO,IAAI,sCAAsC;AACvE;AAAA,QACF;AACA,YAAM,eAAe,eAAe,MAAM;AAC1C,kBAAU,KAAK;AAAA,UACb,MAAM,OAAO;AAAA,UACb,cAAc;AAAA,UACd;AAAA,UACA,UAAU;AAAA,UACV,cAAc;AAAA,UACd,WAAY,OAAO,aAAa;AAAA,UAChC,iBAAiB,OAAO;AAAA,QAC1B,CAAC;AAAA,MACH,MAAO,CAAI,OAAO,SAChB,OAAO,KAAK,OAAO,KAAK;AAAA,EAG9B;AAGA,MAAI,QAAQ,SAAS,QAAQ,MAAM,SAAS,GAAG;AAC7C,QAAM,cAAc,MAAM,uBAAuB,UAAU,SAAS,QAAQ,QAAQ,KAAK;AAEzF,aAAW,UAAU;AACnB,UAAI,OAAO,SAAS,OAAO,MAAM;AAC/B,YAAM,UAAUA,SAAQ,OAAO,IAAI;AACnC,YAAI,CAAC,UAAU,OAAO,GAAG;AACvB,iBAAO,KAAK,SAAS,OAAO,IAAI,sCAAsC;AACtE;AAAA,QACF;AACA,YAAM,eAAe,eAAe,OAAO;AAC3C,kBAAU,KAAK;AAAA,UACb,MAAM,OAAO;AAAA,UACb,cAAc;AAAA,UACd;AAAA,UACA,UAAU;AAAA,UACV,cAAc;AAAA,UACd,WAAY,OAAO,aAAa;AAAA,UAChC,iBAAiB,OAAO;AAAA,QAC1B,CAAC;AAAA,MACH,MAAO,CAAI,OAAO,SAChB,OAAO,KAAK,OAAO,KAAK;AAAA,EAG9B;AAGA,MAAI,QAAQ,YAAY,QAAQ,SAAS,SAAS,GAAG;AACnD,QAAM,iBAAiB,MAAM,uBAAuB,UAAU,YAAY,WAAW,QAAQ,QAAQ;AAErG,aAAW,UAAU;AACnB,UAAI,OAAO,SAAS,OAAO,MAAM;AAC/B,YAAM,UAAUA,SAAQ,OAAO,IAAI;AACnC,YAAI,CAAC,UAAU,OAAO,GAAG;AACvB,iBAAO,KAAK,YAAY,OAAO,IAAI,sCAAsC;AACzE;AAAA,QACF;AACA,YAAM,eAAe,eAAe,OAAO;AAC3C,kBAAU,KAAK;AAAA,UACb,MAAM,OAAO;AAAA,UACb,cAAc;AAAA,UACd;AAAA,UACA,UAAU;AAAA,UACV,cAAc;AAAA,UACd,WAAY,OAAO,aAAa;AAAA,UAChC,iBAAiB,OAAO;AAAA,QAC1B,CAAC;AAAA,MACH,MAAO,CAAI,OAAO,SAChB,OAAO,KAAK,OAAO,KAAK;AAAA,EAG9B;AAEA,gBAAO,KAAK,8BAA8B;AAAA,IACxC,eAAe,UAAU;AAAA,IACzB,YAAY,OAAO;AAAA,EACrB,CAAC,GAEM;AAAA,IACL;AAAA,IACA;AAAA,EACF;AACF;AAMA,eAAe,uBACb,UACA,QACA,eACA,gBACgC;AAChC,MAAM,UAAiC,CAAC,GAElC,MAAMG,OAAK,UAAU,M
AAM,GAC3B,QAAkB,CAAC;AAEzB,MAAI,MAAM,OAAO,GAAG;AAClB,mBAAiB,QAAQC,WAAU,GAAG;AACpC,MAAI,KAAK,SAAS,KAAK,KACrB,MAAM,KAAK,IAAI;AAKrB,WAAW,QAAQ,gBAAgB;AACjC,QAAM,QAAQ,MAAM,2BAA2B,OAAO,IAAI;AAE1D,IAAI,QACF,QAAQ,KAAK;AAAA,MACX;AAAA,MACA,OAAO;AAAA,MACP,MAAM,MAAM;AAAA,MACZ,WAAW,MAAM;AAAA,MACjB,SAAS,MAAM;AAAA,IACjB,CAAC,IAED,QAAQ,KAAK;AAAA,MACX;AAAA,MACA,OAAO;AAAA,MACP,OAAO,GAAG,aAAa,KAAK,IAAI;AAAA,IAClC,CAAC;AAAA,EAEL;AAEA,SAAO;AACT;AAKA,eAAe,2BACb,OACA,MAC2F;AAE3F,WAAW,QAAQ;AACjB,QAAI;AACF,UAAM,UAAU,MAAM,aAAa,IAAI,GACjC,EAAE,YAAY,IAAI,iBAAiB,OAAO;AAChD,UAAI,aAAa,SAAS,MAAM;AAC9B,YAAM,UAAUC,+BAA8B,WAAW;AACzD,eAAO,EAAE,MAAM,MAAM,WAAW,eAAe,QAAQ;AAAA,MACzD;AAAA,IACF,QAAgB;AAAA,IAEhB;AAIF,MAAM,aAAa,MAAM,OAAO,OAAKC,WAAS,GAAG,KAAK,MAAM,IAAI;AAEhE,MAAI,WAAW,WAAW,GAAG;AAC3B,QAAM,OAAO,WAAW,CAAC,GACnB,UAAU,MAAM,uBAAuB,IAAI;AACjD,WAAO,EAAE,MAAM,MAAM,WAAW,YAAY,QAAQ;AAAA,EACtD;AAEA,MAAI,WAAW,SAAS,GAAG;AAEzB,QAAM,UAAU,WAAW;AAAA,MAAK,CAAC,GAAG,MAClC,EAAE,MAAM,GAAG,EAAE,SAAS,EAAE,MAAM,GAAG,EAAE;AAAA,IACrC,EAAE,CAAC,GACG,UAAU,MAAM,uBAAuB,OAAO;AACpD,WAAO,EAAE,MAAM,SAAS,WAAW,YAAY,QAAQ;AAAA,EACzD;AAEA,SAAO;AACT;AASA,eAAe,YACb,UACA,gBACgC;AAChC,MAAM,UAAiC,CAAC,GAClC,aAAa,MAAM,wBAAwB,UAAU,OAAO;AAElE,WAAW,QAAQ,gBAAgB;AACjC,QAAM,QAAQ,MAAM,gBAAgB,YAAY,IAAI;AAEpD,IAAI,QACF,QAAQ,KAAK;AAAA,MACX;AAAA,MACA,OAAO;AAAA,MACP,MAAM,MAAM;AAAA,MACZ,YAAYC,UAAQ,MAAM,IAAI;AAAA;AAAA,MAC9B,WAAW,MAAM;AAAA,MACjB,SAAS,MAAM;AAAA,IACjB,CAAC,IAED,QAAQ,KAAK;AAAA,MACX;AAAA,MACA,OAAO;AAAA,MACP,OAAO,UAAU,IAAI,yBAAyB,kBAAkB,OAAO,CAAC;AAAA,IAC1E,CAAC;AAAA,EAEL;AAEA,SAAO;AACT;AAMA,eAAe,gBACb,YACA,MAC0F;AAE1F,WAAW,QAAQ;AACjB,QAAI;AACF,UAAM,UAAU,MAAM,aAAa,IAAI,GACjC,EAAE,YAAY,IAAI,iBAAiB,OAAO;AAChD,UAAI,aAAa,SAAS,MAAM;AAC9B,YAAM,UAAUF,+BAA8B,WAAW;AACzD,eAAO,EAAE,MAAM,MAAM,WAAW,eAAe,QAAQ;AAAA,MACzD;AAAA,IACF,QAAgB;AAAA,IAEhB;AAIF,WAAW,QAAQ;AAEjB,QADgBC,WAASC,UAAQ,IAAI,CAAC,MACtB,MAAM;AACpB,UAAM,UAAU,MAAM,uBAAuB,IAAI;AACjD,aAAO,EAAE,MAAM,MAAM,WAAW,WAAW,QAAQ;AAAA,IACrD;AAIF,MAAM,mBAAmB,WAAW,OAAO,UACzBA,UAAQ,IAAI,EACH,MAAM,GAAG,EAClB,SA
AS,IAAI,CAC9B;AAED,MAAI,iBAAiB,SAAS,GAAG;AAE/B,QAAM,UAAU,iBAAiB;AAAA,MAAK,CAAC,GAAG,MACxC,EAAE,MAAM,GAAG,EAAE,SAAS,EAAE,MAAM,GAAG,EAAE;AAAA,IACrC,EAAE,CAAC,GACG,UAAU,MAAM,uBAAuB,OAAO;AACpD,WAAO,EAAE,MAAM,SAAS,WAAW,WAAW,QAAQ;AAAA,EACxD;AAEA,SAAO;AACT;AAQO,SAAS,oBAAoB,QAAkB,QAA2B;AAC/E,MAAI,OAAO,WAAW;AACpB;AAGF,MAAM,MAAM,UAAU,cAAc;AACpC,MAAI,MAAM;AAAA,wCAA2C;AACrD,WAAW,SAAS;AAClB,QAAI,MAAM,OAAO,KAAK,EAAE;AAE5B;AAOA,SAASF,+BAA8B,aAAsC;AAC3E,MAAI,CAAC,eAAe,OAAO,eAAgB;AACzC;AAKF,MAAM,UAAU,YAAY,WAAW,YAAY,UAAU;AAE7D,MAAI,OAAO,WAAY,UAAU;AAC/B,QAAM,UAAU,QAAQ,KAAK;AAC7B,WAAO,QAAQ,SAAS,IAAI,UAAU;AAAA,EACxC;AAGF;AAMA,eAAe,uBAAuB,UAA+C;AACnF,MAAI;AACF,QAAM,UAAU,MAAM,aAAa,QAAQ,GACrC,EAAE,YAAY,IAAI,iBAAiB,OAAO;AAChD,WAAOA,+BAA8B,WAAW;AAAA,EAClD,QAAgB;AACd;AAAA,EACF;AACF;;;ADndA,eAAsB,8BACpB,aACA,SACiB;AACjB,MAAM,aAAa,CAAC,WAAW;AAC/B,WAAW,OAAO,CAAC,aAAa,WAAW,WAAW,gBAAgB;AACpE,eAAW,KAAKG,OAAK,aAAa,GAAG,CAAC;AAExC,WAAW,QAAQ;AAIjB,QAHI,QAAQ,QAAQ,UAAW,MAAM,OAAOA,OAAK,MAAM,QAAQ,CAAC,KAC5D,QAAQ,QAAQ,UAAW,MAAM,OAAOA,OAAK,MAAM,QAAQ,CAAC,KAC5D,QAAQ,OAAO,UAAW,MAAM,OAAOA,OAAK,MAAM,OAAO,CAAC,KAC1D,QAAQ,UAAU,UAAW,MAAM,OAAOA,OAAK,MAAM,UAAU,CAAC,EAAI,QAAO;AAEjF,SAAO;AACT;AAUA,eAAsB,4BACpB,UACA,UACA,SACqC;AACrC,MAAM,eAAe,MAAM,wBAAwB,UAAU,UAAU,OAAO;AAE9E,MAAI,aAAa,OAAO,SAAS,GAAG;AAGlC,QAFA,oBAAoB,aAAa,MAAM,GAEnC,aAAa,UAAU,WAAW;AACpC,YAAM,IAAI,MAAM,4CAA4C;AAG9D,WAAO,MAAM,mBAAmB,aAAa,UAAU,MAAM,cAAc;AAAA,EAC7E;AAEA,SAAO,aAAa;AACtB;AAkBA,eAAsB,4BACpB,SACA,QACA,SACA,MACgC;AAChC,MAAM,cAAc,OAAO,aACrB,WAAW,OAAO,gBAAgB,YAAY,aAE9C,WAAW,MAAM,mBAAmB,QAAQ,eAC9C,QAAQ,eACR,MAAM,8BAA8B,aAAa,OAAO,GAEtD,YAAY,MAAM,4BAA4B,UAAU,UAAU,OAAO,GACzE,mBAAmB,6BAA6B,SAAS,WAAW,QAAQ;AAClF,SAAO,uCAAuC,kBAAkB,QAAQ;AAC1E;;;AG9DA,eAAsB,wBACpB,WACA,aACA,gBACA,QACA,QAC6B;AAC7B,MAAM,MAAM,UAAU,cAAc,GAC9B,MAAM,UAAU,cAAc;AASpC,MAPA,OAAO,MAAM,gCAAgC;AAAA,IAC3C,SAAS;AAAA,IACT,SAAS;AAAA,IACT,OAAO,UAAU;AAAA,EACnB,CAAC,GAGG,UAAU,UAAU;AACtB,eAAI,KAAK,oCAAoC,GACtC,CAAC;AAIV,MAAM,iBAAiB,oBAAoB,SAAS;AAEpD,MAAI,OAAO,KAAK,cAAc,EAAE,WAAW;AACzC,eAAI,KA
AK,gCAAgC,GAClC,CAAC;AAGV,MAAI;AAIF,QAAM,SAA8E,CAAC;AACrF,aAAW,CAAC,WAAW,OAAO,KAAK,OAAO,QAAQ,cAAc;AAC9D,aAAO,SAAS,IAAI,QAAQ,IAAI,UAAQ;AAAA,QACtC,OAAO,IAAI;AAAA,QACX,OAAO,IAAI;AAAA,MACb,EAAE;AAGJ,QAAM,oBAAoB,MAAM,IAAI;AAAA,MAClC;AAAA,MACA;AAAA,IACF;AAEA,QAAI,CAAC,qBAAqB,kBAAkB,WAAW;AACrD,oBAAO,KAAK,uDAAuD,GAC5D,CAAC;AAIV,QAAM,WAA+B,kBAAkB,IAAI,eAAa;AAAA,MACtE,cAAc,SAAS;AAAA,MACvB,cAAc,SAAS;AAAA,MACvB,aAAa,SAAS;AAAA,MACtB,UAAU,SAAS;AAAA,MACnB,aAAa,SAAS;AAAA,MACtB,SAAS,SAAS;AAAA,IACpB,EAAE;AAEF,kBAAO,KAAK,2BAA2B;AAAA,MACrC,OAAO,SAAS;AAAA,MAChB,OAAO,MAAM,KAAK,IAAI,IAAI,SAAS,IAAI,OAAK,EAAE,YAAY,CAAC,CAAC;AAAA,IAC9D,CAAC,GAEM;AAAA,EACT,SAAS,OAAO;AACd,QAAI,iBAAiB;AACnB,oBAAO,KAAK,mCAAmC,GACxC,CAAC;AAEV,UAAM;AAAA,EACR;AACF;AAKA,SAAS,oBACP,WACmF;AACnF,MAAM,iBAAoG,CAAC,GAGrG,YAAY,oBAAoB,EAAE,IAAI,UAAQ;AAAA,IAClD,MAAM,IAAI;AAAA,IACV,OAAO,IAAI;AAAA,EACb,EAAE;AAEF,WAAW,EAAE,MAAM,MAAM,KAAK,WAAW;AACvC,QAAM,YAAY,UAAU,OAAO,IAAI,IAAI;AAE3C,QAAI,CAAC,aAAa,UAAU,WAAW;AACrC;AAIF,QAAM,YAAY,GAAG,KAAK,KAAK,UAAU,MAAM,KACzC,eAAkF,CAAC;AAGzF,aAAW,YAAY,WAAW;AAChC,UAAM,gBAAgB,SAAS,UAAU,MAAM,SAAS,OAAO,MAAM,IAC/D,WAAW,YAAY,QAAQ,GAG/B,kBAAkB,SAAS,cAC7B,GAAG,SAAS,WAAW,MAAM,QAAQ,KACrC,UACE,uBAAuB,gBAAgB,iBAAiB,CAAC;AAE/D,mBAAa,KAAK;AAAA,QAChB,OAAO;AAAA,QACP,OAAO,GAAG,SAAS,WAAW,GAAG,aAAa;AAAA,QAC9C,MAAM;AAAA,MACR,CAAC;AAAA,IACH;AAEA,mBAAe,SAAS,IAAI;AAAA,EAC9B;AAEA,SAAO;AACT;AAKA,SAAS,YAAY,UAAsC;AACzD,MAAMC,QAAO,SAAS;AAEtB,SAAI,SAAS,gBAAgB,cACpB,GAAGA,KAAI,MAGTA;AACT;AAMA,SAAS,gBAAgB,MAAc,UAA0B;AAE/D,MAAM,WAAW,KAAe;AAEhC,SAAI,KAAK,UAAU,WACV,OAIF,KAAK,UAAU,GAAG,WAAW,CAAC,IAAI;AAC3C;;;AzIjGA,eAAsB,iBACpB,cACA,SAC8B;AAC9B,SAAO,MAAM,gCAAgC,EAAE,cAAc,QAAQ,CAAC;AAEtE,MAAI;AACF,QAAM,UAAU,MAAM,aAAa,YAAY,GACzC,WAAW,KAAK,MAAM,OAAO;AASnC,QANI,CAAC,SAAS,QAAQ,SAAS,aAC7B,SAAS,OAAOC,WAAS,QAAQ,QAAQ,GACzC,OAAO,MAAM,yDAAyD,EAAE,MAAM,SAAS,KAAK,CAAC,IAI3F,CAAC,SAAS;AACZ,YAAM,IAAI,gBAAgB,mDAAmD;AAG/E,QAAI,CAAC,SAAS,WAAW,CAAC,MAAM,QAAQ,SAAS,OAAO;AACtD,YAAM,IAAI,gBAAgB,uDAAuD;AAGnF,QAAI,SAAS,QAAQ,WAAW;AAC9B,YAAM,IAAI,gBAAgB,
iCAAiC;AAI7D,aAAW,UAAU,SAAS,SAAS;AACrC,UAAI,CAAC,OAAO;AACV,cAAM,IAAI,gBAAgB,uDAAuD;AAEnF,UAAI,CAAC,OAAO;AACV,cAAM,IAAI,gBAAgB,WAAW,OAAO,IAAI,kCAAkC;AAIpF,UAAI;AACF,8BAAsB,OAAO,QAAQ,OAAO,IAAI;AAAA,MAClD,SAAS,OAAO;AACd,cAAM,IAAI;AAAA,UACR,WAAW,OAAO,IAAI,yBAAyB,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC;AAAA,QACvG;AAAA,MACF;AAAA,IACF;AAEA,kBAAO,KAAK,+BAA+B;AAAA,MACzC,MAAM,SAAS;AAAA,MACf,aAAa,SAAS,QAAQ;AAAA,IAChC,CAAC,GAEM;AAAA,EAET,SAAS,OAAO;AACd,UAAI,iBAAiB,kBACb,QAEF,IAAI,gBAAgB,2CAA2C,YAAY,KAAK,KAAK,EAAE;AAAA,EAC/F;AACF;AAQA,eAAsB,sBACpB,aACA,aACiB;AACjB,MAAM,MAAM,cAAc,WAAW,GAC/B,SAAS,cAAc,WAAW;AAGxC,EAAI,YAAY,eACd,IAAI,QAAQ,YAAY,WAAW;AAGrC,MAAM,UAAU,YAAY,QACzB,KAAK,CAAC,GAAG,MAAM,EAAE,KAAK,cAAc,EAAE,IAAI,CAAC,EAC3C,IAAI,aAAW;AAAA,IACd,OAAO,OAAO;AAAA,IACd,OAAO,OAAO;AAAA,IACd,aAAa,OAAO,eAAe;AAAA,EACrC,EAAE;AAEJ,MAAI;AACF,QAAM,iBAAiB,MAAM,OAAO;AAAA,MAClC;AAAA,MACA;AAAA,IACF;AAEA,kBAAO,KAAK,wBAAwB,EAAE,UAAU,eAAe,CAAC,GACzD;AAAA,EACT,SAAS,OAAO;AACd,QAAI,iBAAiB;AACnB,oBAAO,KAAK,iCAAiC,GACtC;AAET,UAAM;AAAA,EACR;AACF;AAaA,eAAsB,kBAAkB,YAAoB,aAA2D;AACrH,MAAM,SAAS,cAAc,WAAW;AACxC,MAAI;AACF,QAAM,OAAO,MAAM,OAAO;AAAA,MACxB,iCAAiC,UAAU;AAAA,MAC3C;AAAA,QACE;AAAA,UACE,OAAO;AAAA,UACP,OAAO;AAAA,UACP,aAAa;AAAA,QACf;AAAA,QACA;AAAA,UACE,OAAO;AAAA,UACP,OAAO;AAAA,UACP,aAAa;AAAA,QACf;AAAA,MACF;AAAA,IACF;AAEA,kBAAO,KAAK,8BAA8B,EAAE,KAAK,CAAC,GAC3C;AAAA,EACT,SAAS,OAAO;AACd,QAAI,iBAAiB;AACnB,oBAAO,KAAK,uCAAuC,GAC5C;AAET,UAAM;AAAA,EACR;AACF;AAYA,eAAe,qBACb,WACA,aACA,SACA,UACwB;AACxB,MAAM,MAAM,cAAc,QAAQ,SAAS;AAC3C,SAAO,KAAK,wCAAwC;AAAA,IAClD,QAAQ,YAAY;AAAA,IACpB,MAAM;AAAA,EACR,CAAC;AAGD,MAAM,IAAI,IAAI,QAAQ;AACtB,IAAE,MAAM,uBAAuB;AAE/B,MAAM,YAAY,MAAM,kBAAkB,WAAW,QAAQ;AAU7D,MAPI,UAAU,UAAU,IACtB,EAAE,KAAK,oBAAoB,IAE3B,EAAE,KAAK,GAAG,UAAU,KAAK,YAAY,UAAU,UAAU,IAAI,KAAK,GAAG,aAAa,GAIhF,UAAU,UAAU;AACtB,eAAI,KAAK,+CAA+C,GACjD;AAAA,MACL,SAAS;AAAA,MACT,MAAM,EAAE,WAAW,GAAG,SAAS,EAAE;AAAA,IACnC;AAIF,MAAM,WAA+B,MAAM;AAAA,IACzC;AAAA,IACA,QAAQ,OAAO,eAAe,YAAY;AAAA,IAC1C,QAAQ,OAAO;AAAA,IACf,cAAc,QAAQ,SAAS;AAAA,IAC/B
,cAAc,QAAQ,SAAS;AAAA,EACjC;AAEA,MAAI,SAAS,WAAW;AACtB,WAAO;AAAA,MACL,SAAS;AAAA,MACT,MAAM,EAAE,WAAW,GAAG,SAAS,EAAE;AAAA,IACnC;AAIF,MAAM,gBAA4C,SAAS,IAAI,CAAAC,QAAM;AAAA,IACnE,MAAMA,GAAE;AAAA,IACR,cAAcA,GAAE;AAAA,IAChB,cAAcA,GAAE;AAAA,IAChB,UAAUC,SAAQ,SAAS;AAAA,IAC3B,cAAcD,GAAE;AAAA,IAChB,WAAW;AAAA,IACX,iBAAiBA,GAAE;AAAA,EACrB,EAAE,GAGI,mBAAmB;AAAA,IACvB;AAAA,IACA;AAAA,IACA;AAAA,EACF,EAAE,IAAI,SAEA,GAAG,OAAO,SAAS,WACrB,GAAG,OAAO,YAAY,WAEjB,GACR,GAGK,SAAS,MAAM,wBAAwB,kBAAkB;AAAA,IAC7D,aAAa;AAAA,IACb,wBAAwB,QAAQ,OAAO,eAAe,YAAY;AAAA,EACpE,CAAC;AAED,SAAO;AAAA,IACL,SAAS,OAAO;AAAA,IAChB,OAAO,OAAO;AAAA,IACd,MAAM;AAAA,MACJ,WAAW,OAAO,MAAM,aAAa;AAAA,MACrC,SAAS,OAAO,MAAM,WAAW;AAAA,IACnC;AAAA,EACF;AACF;AAeA,eAAsB,0BACpB,gBACA,aACA,cACA,aACA,mBACA,mBACA,sBACA,SACA,aACA,oBACwB;AACxB,SAAO,KAAK,iCAAiC;AAAA,IAC3C,aAAa,YAAY;AAAA,IACzB,QAAQ;AAAA,IACR,MAAM;AAAA,EACR,CAAC;AAED,MAAM,MAAM,cAAc,WAAW,GAE/B,cAAc,YAAY,QAAQ,KAAK,OAAK,EAAE,SAAS,YAAY;AACzE,MAAI,CAAC,aAAa;AAChB,QAAM,QAAQ,WAAW,YAAY;AACrC,kBAAO,MAAM,OAAO,EAAE,aAAa,YAAY,KAAK,CAAC,GACrD,IAAI,MAAM,GAAG,YAAY,KAAK,KAAK,EAAE,GAC9B,EAAE,SAAS,IAAO,MAAM;AAAA,EACjC;AAGA,MAAI;AACJ,MAAI;AACF,uBAAmB,sBAAsB,YAAY,QAAQ,YAAY;AAAA,EAC3E,SAAS,OAAO;AACd,WAAO,MAAM,qCAAqC,EAAE,QAAQ,cAAc,MAAM,CAAC;AACjF,QAAM,WAAW,iBAAiB,QAAQ,MAAM,UAAU;AAC1D,eAAI,MAAM,GAAG,YAAY,KAAK,QAAQ,EAAE,GACjC,EAAE,SAAS,IAAO,OAAO,SAAS;AAAA,EAC3C;AAGA,MAAI;AACF,QAAI;AAEJ,QAAI,qBAAqB,gBAAgB;AACvC,sBAAgB,MAAM;AAAA,QACpB;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAAA,aACS,YAAY,gBAAgB;AACrC,sBAAgB,MAAM;AAAA,QACpB;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAAA;AAEA,YAAM,IAAI,MAAM,4BAA4B,iBAAiB,IAAI,EAAE;AAGrE,WAAO;AAAA,EAET,SAAS,OAAO;AACd,WAAO,MAAM,4BAA4B,EAAE,QAAQ,cAAc,MAAM,CAAC;AACxE,QAAM,WAAW,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AACtE,eAAI,MAAM,GAAG,YAAY,KAAK,QAAQ,EAAE,GACjC,EAAE,SAAS,IAAO,OAAO,SAAS;AAAA,EAC3C;AACF;AAKA,eAAe,0BACb,gBACA,aACA,aACA,kBACA,aACA,mBACA,mBACA,sBACA,SACA,aACA,oBACwB;AACxB,MA
AM,MAAM,cAAc,WAAW,GAC/B,eAAe,iBAAiB,cAChC,YAAYE,OAAK,gBAAgB,YAAY;AAGnD,MAAI,CAAE,MAAM,OAAO,SAAS,GAAI;AAC9B,QAAM,QAAQ,SAAS,YAAY;AACnC,kBAAO,MAAM,yBAAyB;AAAA,MACpC,QAAQ,YAAY;AAAA,MACpB,MAAM;AAAA,MACN,UAAU;AAAA,IACZ,CAAC,GACD,IAAI,MAAM,GAAG,YAAY,IAAI,KAAK,KAAK,EAAE,GAClC,EAAE,SAAS,IAAO,MAAM;AAAA,EACjC;AAIA,MAF8B,GAAQ,oBAAoB,QAAQ,UAAU,oBAAoB,QAAQ,UAAU,oBAAoB,OAAO,UAAU,oBAAoB,UAAU,SAE1J;AACzB,WAAO,KAAK,gEAAgE;AAAA,MAC1E,QAAQ,YAAY;AAAA,MACpB,MAAM;AAAA,IACR,CAAC;AAED,QAAMC,OAAM,MAAM;AAAA,MAChB;AAAA,MACA;AAAA,MACA;AAAA,QACE,GAAG;AAAA,QACH,YAAY;AAAA,MACd;AAAA,IACF;AAEA,IAAAA,KAAI,OAAO,oBAAoB;AAAA,MAC7B,QAAQ;AAAA,MACR,QAAQ;AAAA,MACR,SAAS;AAAA,IACX,GAEAA,KAAI,OAAO,iBAAiB;AAAA,MAC1B,UAAU;AAAA,MACV,kBAAkB;AAAA,MAClB,mBAAmB;AAAA,QACjB,KAAK;AAAA,QACL,WAAW;AAAA,QACX,YAAY,YAAY;AAAA,MAC1B;AAAA,IACF,GAEAA,KAAI,eAAe,WACnBA,KAAI,eAAeC,WAAS,gBAAgB,SAAS,KAAK,KAG1DD,KAAI,OAAO,cAAc,0BAA0B;AAAA,MACjD,QAAQ;AAAA,MACR,MAAM;AAAA,IACR,CAAC;AAED,QAAM,YAAY,MAAM,4BAA4B,WAAW,gBAAgB,sBAAsB,CAAC,CAAC,GAEjG,mBAAmB,6BAA6BA,MAAK,WAAW,cAAc,EAAE,IAAI,SACpF,GAAG,OAAO,SAAS,WACrB,GAAG,OAAO,YAAY,iBAEjB,GACR,GAIK,cAAc,MAAM,wBAAwB,kBAAkB;AAAA,MAClE,aAAa;AAAA,MACb,wBAAwB,YAAY;AAAA,IACtC,CAAC;AACD,WAAO;AAAA,MACL,SAAS,YAAY;AAAA,MACrB,OAAO,YAAY;AAAA,IACrB;AAAA,EACF;AAGA,MAAM,MAAM,MAAM;AAAA,IAChB;AAAA,IACA;AAAA,IACA;AAAA,MACE,GAAG;AAAA,MACH,YAAY;AAAA,IACd;AAAA,EACF;AAqBA,MAlBA,IAAI,OAAO,oBAAoB;AAAA,IAC7B,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,SAAS;AAAA,EACX,GAGA,IAAI,OAAO,iBAAiB;AAAA,IAC1B,UAAU;AAAA,IACV,kBAAkB;AAAA,IAClB,mBAAmB;AAAA,MACjB,KAAK;AAAA,MACL,WAAW;AAAA,MACX,YAAY,YAAY;AAAA,IAC1B;AAAA,EACF,GAGI,gBAAgB;AAElB,kBAAO,KAAK,uDAAuD;AAAA,MACjE,QAAQ,YAAY;AAAA,MACpB,MAAM;AAAA,IACR,CAAC,GAED,IAAI,eAAe,WACnB,IAAI,eAAeC,WAAS,gBAAgB,SAAS,KAAK,KAGnD,MAAM,qBAAqB,WAAW,aAAa,KADzC,cACsD;AAIzE,SAAO,KAAK,oDAAoD;AAAA,IAC9D,QAAQ,YAAY;AAAA,IACpB,MAAM;AAAA,EACR,CAAC;AAGD,MAAM,YAAY,MAAM,4BAA4B,WAAW,WAAW;AAE1E,MAAI,CAAC,UAAU,UAAU;AACvB,QAAM,aAAa,YAAY,WAAW,KACtC,sGACA,IACE,QAAQ,SAAS,YAAY,qCAAqC,UAAU;AAClF,kBAAO,MAAM,4BAA4B;AAAA,MACvC,QAAQ,YAAY;
AAAA,MACpB,MAAM;AAAA,MACN,QAAQ,YAAY;AAAA,IACtB,CAAC,GACD,IAAI,MAAM,GAAG,YAAY,IAAI,KAAK,KAAK,EAAE,GAClC,EAAE,SAAS,IAAO,MAAM;AAAA,EACjC;AAGA,MAAI,UAAU,gBACR,CAAE,MAAM,uBAAuB,UAAU,YAAY,GAAI;AAC3D,QAAM,QAAQ,+BAA+B,YAAY;AACzD,kBAAO,MAAM,2BAA2B,EAAE,QAAQ,YAAY,KAAK,CAAC,GACpE,IAAI,MAAM,GAAG,YAAY,IAAI,KAAK,KAAK,EAAE,GAClC,EAAE,SAAS,IAAO,MAAM;AAAA,EACjC;AAIF,MAAI,OAAO,iBAAiB;AAAA,IAC1B,GAAG,IAAI,OAAO;AAAA,IACd,YAAY,UAAU;AAAA,IACtB,cAAc,UAAU;AAAA,EAC1B;AAKA,MAAM,iBAAiB,MAAM,0BAA0B,GAAG;AAE1D,MAAI,CAAC,eAAe,SAAS;AAC3B,QAAM,gBAAgB,IAAI,OAAO,eAAe,YAAY;AAC5D,QAAI,MAAM,GAAG,aAAa,KAAK,eAAe,SAAS,eAAe,EAAE;AAAA,EAC1E;AAEA,SAAO;AAAA,IACL,SAAS,eAAe;AAAA,IACxB,OAAO,eAAe;AAAA,EACxB;AACF;AAKA,eAAe,iBACb,aACA,aACA,kBACA,aACA,SACA,aACA,oBACwB;AACxB,MAAM,MAAM,cAAc,WAAW,GAC/B,SAAS,iBAAiB,QAC1B,SAAS,iBAAiB,QAC1B,UAAU,iBAAiB;AAEjC,SAAO,KAAK,yBAAyB;AAAA,IACnC,QAAQ,YAAY;AAAA,IACpB;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAGD,MAAM,MAAM,MAAM;AAAA,IAChB;AAAA,IACA;AAAA,IACA;AAAA,MACE,GAAG;AAAA,MACH;AAAA,MACA;AAAA,IACF;AAAA,EACF;AASA,MANA,IAAI,OAAO,iBAAiB;AAAA,IAC1B,UAAU;AAAA,IACV,kBAAkB;AAAA,EACpB,GAE8B,GAAQ,oBAAoB,QAAQ,UAAU,oBAAoB,QAAQ,UAAU,oBAAoB,OAAO,UAAU,oBAAoB,UAAU,SAC1J;AAEzB,QAAM,SAAS,MADA,mBAAmB,IAAI,MAAM,EAChB,KAAK,IAAI,QAAQ,SAAS,WAAW;AAEjE,QAAI,OAAO,cAAc,OAAO,aAChC,IAAI,OAAO,UAAU,OAAO,SAC5B,IAAI,OAAO,cAAc,OAAO,aAChC,IAAI,OAAO,iBAAiB;AAAA,MAC1B,GAAG,OAAO;AAAA,MACV,GAAI,IAAI,OAAO,kBAAkB,CAAC;AAAA,MAClC,kBAAkB;AAAA,IACpB,GAEI,OAAO,gBAAgB,cACxB,IAAI,OAAe,aAAa,OAAO,eAAe,YAGrD,OAAO,gBAAgB,iBACzB,mBAAmB,KAAK,MAAM;AAGhC,QAAM,WAAW,IAAI,gBAAgB,OAAO,eAAe,YAAY,WACjE,WAAW,OAAO,gBAAgB,YAAY,OAAO,eAAe,UACpE,YAAY,MAAM,4BAA4B,UAAU,UAAU,sBAAsB,CAAC,CAAC,GAE1F,mBAAmB,6BAA6B,KAAK,WAAW,QAAQ,GACxE,cAAc,MAAM,wBAAwB,kBAAkB;AAAA,MAClE,aAAa;AAAA,MACb,wBAAwB,IAAI,OAAO,eAAe,YAAY;AAAA,IAChE,CAAC;AACD,WAAO;AAAA,MACL,SAAS,YAAY;AAAA,MACrB,OAAO,YAAY;AAAA,IACrB;AAAA,EACF;AAGA,MAAI,gBAAgB,WAAW;AAE7B,WAAO,KAAK,6CAA6C;AAAA,MACvD,QAAQ,YAAY;AAAA,IACtB,CAAC;AAGD,QAAM,SAAS,MADA,mBAAmB,IAAI,MAAM,EAChB,KAAK,IAAI,QAAQ,SAAS,WAAW;AAEjE,QAAI,
OAAO,cAAc,OAAO,aAChC,IAAI,OAAO,UAAU,OAAO,SAC5B,IAAI,OAAO,cAAc,OAAO,aAChC,IAAI,OAAO,iBAAiB;AAAA,MAC1B,GAAG,OAAO;AAAA,MACV,GAAI,IAAI,OAAO,kBAAkB,CAAC;AAAA,MAClC,kBAAkB;AAAA,IACpB,GAEI,OAAO,gBAAgB,cACxB,IAAI,OAAe,aAAa,OAAO,eAAe,YAGrD,OAAO,gBAAgB,iBACzB,mBAAmB,KAAK,MAAM;AAGhC,QAAM,WAAW,IAAI,gBAAgB,OAAO,eAAe,YAAY,WACjE,WAAW,OAAO,gBAAgB,YAAY,OAAO,eAAe;AAE1E,WAAO,MAAM,qBAAqB,UAAU,aAAa,KAAK,QAAQ;AAAA,EACxE;AAGA,SAAO,KAAK,0CAA0C;AAAA,IACpD,QAAQ,YAAY;AAAA,EACtB,CAAC;AAID,MAAM,iBAAiB,MAAM,0BAA0B,GAAG;AAE1D,MAAI,CAAC,eAAe,SAAS;AAC3B,QAAM,gBAAgB,IAAI,OAAO,eAAe,YAAY;AAC5D,QAAI,MAAM,GAAG,aAAa,KAAK,eAAe,SAAS,eAAe,EAAE;AAAA,EAC1E;AAEA,SAAO;AAAA,IACL,SAAS,eAAe;AAAA,IACxB,OAAO,eAAe;AAAA,EACxB;AACF;AASO,SAAS,oBACd,aACA,kBACwC;AACxC,MAAM,iBAAiB,IAAI,IAAI,YAAY,QAAQ,IAAI,OAAK,EAAE,IAAI,CAAC,GAE7DC,SAAkB,CAAC,GACnB,UAAoB,CAAC;AAE3B,WAAW,QAAQ;AACjB,IAAI,eAAe,IAAI,IAAI,IACzBA,OAAM,KAAK,IAAI,IAEf,QAAQ,KAAK,IAAI;AAIrB,SAAO,EAAE,OAAAA,QAAO,QAAQ;AAC1B;AA2BA,eAAsB,0BACpB,gBACA,aACA,aACA,mBACA,mBACA,sBACA,SACA,aAC+B;AAC/B,MAAM,UAAgC,CAAC;AAEvC,WAAW,QAAQ,aAAa;AAC9B,QAAM,cAAc,YAAY,QAAQ,KAAK,OAAK,EAAE,SAAS,IAAI;AACjE,QAAI,CAAC,YAAa;AAElB,QAAI;AACJ,QAAI;AACF,yBAAmB,sBAAsB,YAAY,QAAQ,IAAI;AAAA,IACnE,SAAS,OAAO;AACd,aAAO,KAAK,8CAA8C,EAAE,QAAQ,MAAM,MAAM,CAAC;AACjF;AAAA,IACF;AAEA,QAAI;AACF,UAAI,qBAAqB,gBAAgB,GAAG;AAC1C,YAAM,eAAe,iBAAiB,cAChC,YAAYH,OAAK,gBAAgB,YAAY;AAEnD,YAAI,CAAE,MAAM,OAAO,SAAS,GAAI;AAC9B,iBAAO,KAAK,kCAAkC,EAAE,QAAQ,MAAM,MAAM,aAAa,CAAC;AAClF;AAAA,QACF;AAEA,YAAM,MAAM,MAAM,wBAAwB,aAAa,WAAW;AAAA,UAChE,GAAG;AAAA,UACH,YAAY;AAAA,QACd,CAAC;AAED,YAAI,OAAO,oBAAoB;AAAA,UAC7B,QAAQ;AAAA,UACR,QAAQ;AAAA,UACR,SAAS;AAAA,QACX,GAEA,IAAI,OAAO,iBAAiB;AAAA,UAC1B,UAAU;AAAA,UACV,kBAAkB;AAAA,UAClB,mBAAmB;AAAA,YACjB,KAAK;AAAA,YACL,WAAW;AAAA,YACX,YAAY,YAAY;AAAA,UAC1B;AAAA,QACF,GAEA,IAAI,eAAe,WACnB,IAAI,eAAeE,WAAS,gBAAgB,SAAS,KAAK,KAE1D,QAAQ,KAAK;AAAA,UACX;AAAA,UACA,SAAS;AAAA,UACT,UAAU;AAAA,UACV,UAAU;AAAA,QACZ,CAAC;AAAA,MACH,WAAW,YAAY,gBAAgB,GAAG;AACxC,YAAM,MAAM,MAAM,uBAAuB,aAAa,iBAAiB,QAAS;AAAA,UAC9E,GAAG;AAAA,UACH,
QAAQ,iBAAiB;AAAA,UACzB,SAAS,iBAAiB;AAAA,QAC5B,CAAC;AAED,YAAI,OAAO,iBAAiB;AAAA,UAC1B,UAAU;AAAA,UACV,kBAAkB;AAAA,QACpB;AAGA,YAAM,SAAS,MADA,mBAAmB,IAAI,MAAM,EAChB,KAAK,IAAI,QAAQ,SAAS,WAAW;AAEjE,YAAI,OAAO,cAAc,OAAO,aAChC,IAAI,OAAO,UAAU,OAAO,SAC5B,IAAI,OAAO,cAAc,OAAO,aAChC,IAAI,OAAO,iBAAiB;AAAA,UAC1B,GAAG,OAAO;AAAA,UACV,GAAI,IAAI,OAAO,kBAAkB,CAAC;AAAA,UAClC,kBAAkB;AAAA,QACpB,GAEI,OAAO,gBAAgB,iBACzB,mBAAmB,KAAK,MAAM;AAGhC,YAAM,WAAW,IAAI,gBAAgB,OAAO,eAAe,YAAY,WACjE,WAAW,OAAO,gBAAgB,YAAY,OAAO,eAAe;AAE1E,gBAAQ,KAAK;AAAA,UACX;AAAA,UACA,SAAS;AAAA,UACT;AAAA,UACA;AAAA,QACF,CAAC;AAAA,MACH;AAAA,IACF,SAAS,OAAO;AACd,aAAO,KAAK,qCAAqC,EAAE,QAAQ,MAAM,MAAM,CAAC;AACxE;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;;;ADr3BA,eAAsB,iBAAiB,SAAiD;AACtF,MAAM,YAAYE,OAAK,SAAS,aAAa,aAAa,GAGpD,qBAAqBA,OAAK,WAAW,cAAc,WAAW;AACpE,MAAI,MAAM,OAAO,kBAAkB;AACjC,kBAAO,KAAK,0CAA0C,EAAE,MAAM,mBAAmB,CAAC,GAC3E;AAAA,MACL,UAAU;AAAA,MACV,MAAM;AAAA,MACN,cAAc;AAAA,IAChB;AAIF,MAAM,0BAA0BA,OAAK,WAAW,cAAc,gBAAgB;AAC9E,SAAI,MAAM,OAAO,uBAAuB,KACtC,OAAO,KAAK,2CAA2C,EAAE,MAAM,wBAAwB,CAAC,GACjF;AAAA,IACL,UAAU;AAAA,IACV,MAAM;AAAA,IACN,cAAc;AAAA,EAChB,KAIK,EAAE,UAAU,GAAM;AAC3B;AAiBA,eAAsB,4BACpB,SACA,kBACgC;AAEhC,MAAM,oBAAoB,MAAM,iBAAiB,OAAO;AAExD,SAAI,kBAAkB,WACb,oBAIL,kBAAkB,WAAW,MACZ,MAAM,iBAAiB,OAAO,KAG/C,OAAO,KAAK,sDAAsD;AAAA,IAChE;AAAA,IACA,YAAY,iBAAiB;AAAA,EAC/B,CAAC,GAEM;AAAA,IACL,UAAU;AAAA,IACV,MAAM;AAAA,EACR,KAIG,EAAE,UAAU,GAAM;AAC3B;AAOA,eAAsB,iBAAiB,SAAmC;AACxE,MAAM,oBAAoB,CAAC,YAAY,UAAU,UAAU,OAAO,GAC5D,qBAAqB,CAAC,aAAa,WAAW;AAGpD,WAAW,UAAU,mBAAmB;AACtC,QAAM,aAAaA,OAAK,SAAS,MAAM;AACvC,QAAI,MAAM,OAAO,UAAU;AAEzB,UAAI;AAEF,aADc,MAAMC,IAAG,QAAQ,UAAU,GAC/B,SAAS;AACjB,iBAAO;AAAA,MAEX,QAAQ;AAAA,MAER;AAAA,EAEJ;AAGA,WAAW,QAAQ,oBAAoB;AACrC,QAAM,WAAWD,OAAK,SAAS,IAAI;AACnC,QAAI,MAAM,OAAO,QAAQ;AACvB,aAAO;AAAA,EAEX;AAEA,SAAO;AACT;AASA,eAAsB,wBAAwB,UAAuD;AACnG,MAAM,eAAeA,OAAK,UAAU,aAAa,eAAe,cAAc,gBAAgB;AAC9F,MAAI,CAAE,MAAM,OAAO,YAAY,EAAI,QAAO;AAC1C,MAAI;AACF,WAAO,MAAM,iBAAiB,cAAc,EAAE,UAAU,SAAS,CAAC;AAAA,EACpE,QAAQ;AACN,WAAO;AAAA,EACT;AA
CF;AAMA,eAAsB,uBAAuB,cAAwC;AACnF,MAAI;AACF,QAAM,UAAU,MAAM,aAAa,YAAY;AAC/C,gBAAK,MAAM,OAAO,GACX;AAAA,EACT,SAAS,OAAO;AACd,kBAAO,MAAM,mCAAmC,EAAE,cAAc,MAAM,CAAC,GAChE;AAAA,EACT;AACF;;;AJnIA,eAAsB,kBACpB,UACA,UACkC;AAClC,SAAO,MAAM,yBAAyB,EAAE,UAAU,SAAS,CAAC;AAE5D,MAAM,mBAAmBE,SAAQ,QAAQ,GACnC,mBAAmBA,SAAQ,QAAQ,GAEnC,eAAqC,CAAC,GAGtC,SAAS,MAAM,eAAe,kBAAkB,gBAAgB,GAChE,SAAS,MAAM,eAAe,kBAAkB,gBAAgB,GAChE,WAAW,MAAM,iBAAiB,kBAAkB,gBAAgB,GACpE,QAAQ,MAAM,cAAc,kBAAkB,gBAAgB,GAC9D,QAAQ,MAAM,cAAc,kBAAkB,gBAAgB,GAC9D,MAAM,MAAM,YAAY,kBAAkB,gBAAgB,GAC1D,EAAE,SAAS,UAAU,oBAAoB,IAAI,MAAM,gBAAgB,gBAAgB;AAEzF,eAAa,KAAK,GAAG,QAAQ,GAAG,QAAQ,GAAG,UAAU,GAAG,OAAO,GAAG,OAAO,GAAG,KAAK,GAAG,OAAO;AAG3F,MAAM,SAAS,oBAAI,IAAwC;AAC3D,WAAW,YAAY,cAAc;AACnC,QAAM,WAAW,OAAO,IAAI,SAAS,YAAY,KAAK,CAAC;AACvD,aAAS,KAAK,QAAQ,GACtB,OAAO,IAAI,SAAS,cAAc,QAAQ;AAAA,EAC5C;AAEA,gBAAO,KAAK,+BAA+B;AAAA,IACzC,OAAO,aAAa;AAAA,IACpB,QAAQ,OAAO;AAAA,IACf,QAAQ,OAAO;AAAA,IACf,UAAU,SAAS;AAAA,IACnB,OAAO,MAAM;AAAA,IACb,OAAO,MAAM;AAAA,IACb,KAAK,IAAI;AAAA,IACT,SAAS,QAAQ;AAAA,EACnB,CAAC,GAEM;AAAA,IACL,KAAK;AAAA,IACL;AAAA,IACA,OAAO,aAAa;AAAA,IACpB,UAAU;AAAA,IACV,UAAU;AAAA,IACV;AAAA,EACF;AACF;AAKA,eAAe,eACb,UACA,UAC+B;AAC/B,MAAM,YAAkC,CAAC,GACnC,YAAYC,OAAK,UAAU,QAAQ;AAEzC,MAAI,CAAE,MAAM,OAAO,SAAS;AAC1B,WAAO;AAGT,iBAAiB,QAAQC,WAAU,SAAS,GAAG;AAC7C,QAAI,CAAC,KAAK,SAAS,KAAK;AACtB;AAGF,QAAM,UAAU,MAAM,aAAa,IAAI,GACjC,WAAW,gCAAgC,OAAO,GAClD,eAAe,sBAAsB,MAAM,QAAQ;AAEzD,cAAU,KAAK;AAAA,MACb,cAAc;AAAA,MACd;AAAA,MACA,aAAa,sBAAsB,SAAS,MAAM,oBAAoB,IAAI,CAAC;AAAA,MAC3E,aAAa,SAAS;AAAA,MACtB,SAAS,SAAS;AAAA,MAClB,UAAU;AAAA,MACV,aAAa;AAAA,MACb,WAAW,SAAS,OAAO,gBAAgB;AAAA,IAC7C,CAAC;AAAA,EACH;AAEA,SAAO;AACT;AAKA,eAAe,eACb,UACA,UAC+B;AAC/B,MAAM,YAAkC,CAAC,GACnC,aAAa,MAAM,wBAAwB,UAAU,OAAO;AAElE,WAAW,QAAQ,YAAY;AAC7B,QAAI,CAAC,aAAaC,WAAS,IAAI,GAAG,OAAO;AACvC;AAGF,QAAM,WAAWC,UAAQ,IAAI,GACvB,UAAU,MAAM,aAAa,IAAI,GACjC,WAAW,gCAAgC,OAAO,GAClD,eAAe,sBAAsB,UAAU,QAAQ;AAE7D,cAAU,KAAK;AAAA,MACb,cAAc;AAAA,MACd;AAAA,MACA,aAAa,sBAAsB,SAAS,MAAM,wBAAwB,QAAQ,CAAC;AAAA,MACnF,aA
Aa,SAAS;AAAA,MACtB,SAAS,SAAS;AAAA,MAClB,UAAU;AAAA,MACV,aAAa;AAAA,MACb,WAAW,SAAS,OAAO,gBAAgB;AAAA,IAC7C,CAAC;AAAA,EACH;AAEA,SAAO;AACT;AAKA,eAAe,iBACb,UACA,UAC+B;AAC/B,MAAM,YAAkC,CAAC,GACnC,cAAcH,OAAK,UAAU,UAAU;AAE7C,MAAI,CAAE,MAAM,OAAO,WAAW;AAC5B,WAAO;AAGT,iBAAiB,QAAQC,WAAU,WAAW,GAAG;AAC/C,QAAI,CAAC,KAAK,SAAS,KAAK;AACtB;AAGF,QAAM,UAAU,MAAM,aAAa,IAAI,GACjC,WAAW,gCAAgC,OAAO,GAClD,eAAe,sBAAsB,MAAM,QAAQ;AAEzD,cAAU,KAAK;AAAA,MACb,cAAc;AAAA,MACd;AAAA,MACA,aAAa,sBAAsB,SAAS,MAAM,oBAAoB,IAAI,CAAC;AAAA,MAC3E,aAAa,SAAS;AAAA,MACtB,SAAS,SAAS;AAAA,MAClB,UAAU;AAAA,MACV,aAAa;AAAA,MACb,WAAW,SAAS,OAAO,gBAAgB;AAAA,IAC7C,CAAC;AAAA,EACH;AAEA,SAAO;AACT;AAKA,eAAe,cACb,UACA,UAC+B;AAC/B,MAAM,YAAkC,CAAC,GACnC,WAAWD,OAAK,UAAU,OAAO;AAEvC,MAAI,CAAE,MAAM,OAAO,QAAQ;AACzB,WAAO;AAGT,iBAAiB,QAAQC,WAAU,QAAQ,GAAG;AAC5C,QAAI,CAAC,KAAK,SAAS,KAAK;AACtB;AAGF,QAAM,UAAU,MAAM,aAAa,IAAI,GACjC,WAAW,gCAAgC,OAAO,GAClD,eAAe,sBAAsB,MAAM,QAAQ;AAEzD,cAAU,KAAK;AAAA,MACb,cAAc;AAAA,MACd;AAAA,MACA,aAAa,sBAAsB,SAAS,MAAM,oBAAoB,IAAI,CAAC;AAAA,MAC3E,aAAa,SAAS;AAAA,MACtB,SAAS,SAAS;AAAA,MAClB,UAAU;AAAA,MACV,aAAa;AAAA,MACb,WAAW,SAAS,OAAO,gBAAgB;AAAA,IAC7C,CAAC;AAAA,EACH;AAEA,SAAO;AACT;AAKA,eAAe,cACb,UACA,UAC+B;AAC/B,MAAM,YAAkC,CAAC,GACnC,WAAWD,OAAK,UAAU,OAAO;AAEvC,MAAI,CAAE,MAAM,OAAO,QAAQ;AACzB,WAAO;AAIT,iBAAiB,QAAQC,WAAU,QAAQ,GAAG;AAC5C,QAAM,eAAe,sBAAsB,MAAM,QAAQ,GACnD,cAAcC,WAAS,IAAI;AAEjC,cAAU,KAAK;AAAA,MACb,cAAc;AAAA,MACd;AAAA,MACA;AAAA,MACA,UAAU;AAAA,MACV,aAAa;AAAA,IACf,CAAC;AAAA,EACH;AAEA,SAAO;AACT;AAKA,eAAe,YACb,UACA,UAC+B;AAC/B,MAAM,YAAkC,CAAC,GACnC,WAAW,CAAC,aAAa,UAAU;AAEzC,WAAW,YAAY,UAAU;AAC/B,QAAM,WAAWF,OAAK,UAAU,QAAQ;AAExC,QAAI,MAAM,OAAO,QAAQ,GAAG;AAC1B,UAAM,eAAe,sBAAsB,UAAU,QAAQ;AAE7D,gBAAU,KAAK;AAAA,QACb,cAAc;AAAA,QACd;AAAA,QACA,aAAa;AAAA,QACb,aAAa;AAAA,QACb;AAAA,QACA,aAAa;AAAA,MACf,CAAC;AAGD;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AAOA,eAAe,gBACb,UACkF;AAClF,MAAM,WAAW,MAAM,wBAAwB,QAAQ;AACvD,SAAK,WAYE,EAAE,SAVO,SAAS,QAAQ,IAAI,aAAW;AAAA,IAC9C,cAAc;AAAA,IACd,cAAc,OAAO;AAAA,IACrB,aAAa,OAAO;AAAA,IACpB,aAAa,OAAO;AAAA,IACpB
,SAAS,OAAO;AAAA,IAChB,UAAUA,OAAK,UAAU,aAAa,eAAe,cAAc,gBAAgB;AAAA,IACnF,aAAa;AAAA,EACf,EAAE,GAEgB,SAAS,IAZL,EAAE,SAAS,CAAC,GAAG,UAAU,KAAK;AAatD;AAKA,SAAS,sBACP,cACA,UACQ;AAER,SADYI,WAAS,UAAU,YAAY,EAChC,QAAQ,OAAO,GAAG,EAAE,QAAQ,SAAS,EAAE;AACpD;",
|
|
6
|
+
"names": ["join", "basename", "dirname", "relative", "resolve", "walkFiles", "isDirectory", "stat", "join", "fs", "join", "basename", "relative", "resolve", "basename", "join", "relative", "path", "basename", "relative", "semver", "semver", "semver", "join", "relative", "basename", "isJunk", "path", "join", "join", "relative", "basename", "readFile", "join", "join", "isJunk", "yaml", "path", "basename", "relative", "isJunk", "readFile", "join", "relative", "isJunk", "basename", "basename", "semver", "basename", "join", "relative", "semver", "path", "semver", "yaml", "path", "getPlatformRootFiles", "yaml", "homedir", "join", "createHash", "join", "basename", "createHash", "join", "join", "join", "homedir", "path", "applyFilter", "cacheManager", "path", "join", "resolve", "join", "resolve", "dirname", "relative", "isAbsolute", "path", "stat", "isAbsolute", "resolve", "dirname", "join", "relative", "resolve", "join", "join", "finalPath", "stat", "resolve", "dirname", "pluginDetection", "fs", "dirname", "join", "yaml", "join", "isGitSource", "dirname", "fs", "pkg", "metadata", "version", "join", "relative", "stat", "join", "relative", "isDirectory", "stat", "minimatch", "minimatch", "path", "path", "path", "operation", "config", "yaml", "yaml", "yaml", "path", "basename", "dirname", "extname", "minimatch", "basename", "dirname", "extname", "path", "readdir", "join", "readFile", "minimatch", "path", "join", "dirname", "rm", "readdir", "tmpdir", "createHash", "createHash", "join", "tmpdir", "readdir", "rm", "dirname", "opts", "readdir", "join", "readFile", "join", "join", "join", "join", "semver", "getInstalledPackageVersion", "join", "join", "join", "path", "join", "join", "join", "relative", "fs", "fs", "dirname", "join", "yaml", "sortAndDedupeStrings", "join", "sortAndDedupeStrings", "dirname", "fs", "relative", "join", "dirname", "basename", "relative", "extname", "fs", "path", "yaml", "TOML", "getNestedValue", "path", "setNestedValue", "deleteNestedValue", "path", 
"fs", "path", "minimatch", "path", "fs", "minimatch", "path", "results", "fs", "yaml", "getNestedValue", "setNestedValue", "deleteNestedValue", "basename", "join", "homedir", "minimatch", "isJunk", "createEmptyResult", "homedir", "join", "basename", "isJunk", "minimatch", "createEmptyResult", "path", "pluralKey", "isMarkerFile", "discoverResources", "scopeTag", "join", "dirname", "basename", "relative", "extname", "basename", "basename", "relative", "relative", "basename", "dirname", "join", "relative", "fs", "tmpdir", "mkdtemp", "rm", "minimatch", "mkdtemp", "join", "tmpdir", "rm", "walkFiles", "fs", "relative", "minimatch", "dirname", "basename", "join", "relative", "dirname", "basename", "extname", "dirname", "join", "fs", "yaml", "sep", "newRemainder", "dirname", "join", "relative", "relative", "join", "collectFilesUnderDirectory", "join", "join", "sep", "warning", "dirname", "fs", "allowedTargets", "relocatedFiles", "decision", "executionResult", "result", "join", "dirname", "basename", "relative", "extname", "walkFiles", "relative", "relative", "walkFiles", "relative", "join", "join", "yaml", "TOML", "join", "path", "readdir", "path", "readdir", "path", "isDirectory", "path", "join", "join", "fs", "relative", "effectiveVersion", "join", "join", "path", "path", "path", "join", "join", "path", "rootNames", "join", "join", "join", "join", "basename", "dirname", "relative", "resolve", "sep", "basename", "relative", "relative", "basename", "resolve", "sep", "relative", "join", "walkFiles", "extractVersionFromFrontmatter", "basename", "dirname", "join", "path", "basename", "s", "resolve", "join", "ctx", "relative", "valid", "join", "fs", "resolve", "join", "walkFiles", "basename", "dirname", "relative"]
|
|
7
|
+
}
|