@powerhousedao/codegen 6.0.2-staging.2 → 6.0.2-staging.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (732):
  1. package/{dist/src/templates/boilerplate/LICENSE.js → LICENSE} +1 -4
  2. package/dist/file-builders-BraThjto.mjs +8365 -0
  3. package/dist/file-builders-BraThjto.mjs.map +1 -0
  4. package/dist/index-IUcK5JZl.d.mts +371 -0
  5. package/dist/index-IUcK5JZl.d.mts.map +1 -0
  6. package/dist/index.d.mts +239 -0
  7. package/dist/index.d.mts.map +1 -0
  8. package/dist/index.mjs +4997 -0
  9. package/dist/index.mjs.map +1 -0
  10. package/dist/src/file-builders/index.d.mts +2 -0
  11. package/dist/src/file-builders/index.mjs +2 -0
  12. package/dist/src/name-builders/index.d.mts +2 -0
  13. package/dist/src/name-builders/index.mjs +2 -0
  14. package/dist/src/templates/index.d.mts +375 -0
  15. package/dist/src/templates/index.d.mts.map +1 -0
  16. package/dist/src/templates/index.mjs +2 -0
  17. package/dist/src/utils/index.d.mts +151 -0
  18. package/dist/src/utils/index.d.mts.map +1 -0
  19. package/dist/src/utils/index.mjs +2 -0
  20. package/dist/validation-Z3z0BJlu.d.mts +43 -0
  21. package/dist/validation-Z3z0BJlu.d.mts.map +1 -0
  22. package/package.json +26 -44
  23. package/dist/index.d.ts +0 -8
  24. package/dist/index.d.ts.map +0 -1
  25. package/dist/index.js +0 -7
  26. package/dist/index.js.map +0 -1
  27. package/dist/src/codegen/.hygen/package.json +0 -6
  28. package/dist/src/codegen/.hygen/templates/powerhouse/generate-custom-subgraph/index.js +0 -10
  29. package/dist/src/codegen/.hygen/templates/powerhouse/generate-custom-subgraph/resolvers.esm.t +0 -20
  30. package/dist/src/codegen/.hygen/templates/powerhouse/generate-custom-subgraph/schema.esm.t +0 -20
  31. package/dist/src/codegen/.hygen/templates/powerhouse/generate-document-model/actions.esm.t +0 -16
  32. package/dist/src/codegen/.hygen/templates/powerhouse/generate-document-model/creators.esm.t +0 -9
  33. package/dist/src/codegen/.hygen/templates/powerhouse/generate-document-model/customUtils.esm.t +0 -5
  34. package/dist/src/codegen/.hygen/templates/powerhouse/generate-document-model/document-schema.esm.t +0 -56
  35. package/dist/src/codegen/.hygen/templates/powerhouse/generate-document-model/document-type.esm.t +0 -6
  36. package/dist/src/codegen/.hygen/templates/powerhouse/generate-document-model/documentModel.esm.t +0 -7
  37. package/dist/src/codegen/.hygen/templates/powerhouse/generate-document-model/documentModelTest.esm.t +0 -121
  38. package/dist/src/codegen/.hygen/templates/powerhouse/generate-document-model/hooks.esm.t +0 -49
  39. package/dist/src/codegen/.hygen/templates/powerhouse/generate-document-model/index.esm.t +0 -23
  40. package/dist/src/codegen/.hygen/templates/powerhouse/generate-document-model/index.js +0 -127
  41. package/dist/src/codegen/.hygen/templates/powerhouse/generate-document-model/module.esm.t +0 -22
  42. package/dist/src/codegen/.hygen/templates/powerhouse/generate-document-model/ph-factories.esm.t +0 -93
  43. package/dist/src/codegen/.hygen/templates/powerhouse/generate-document-model/reducer.esm.t +0 -46
  44. package/dist/src/codegen/.hygen/templates/powerhouse/generate-document-model/root-utils.esm.t +0 -11
  45. package/dist/src/codegen/.hygen/templates/powerhouse/generate-document-model/rootActions.esm.t +0 -13
  46. package/dist/src/codegen/.hygen/templates/powerhouse/generate-document-model/rootIndex.esm.t +0 -14
  47. package/dist/src/codegen/.hygen/templates/powerhouse/generate-document-model/schema.esm.t +0 -6
  48. package/dist/src/codegen/.hygen/templates/powerhouse/generate-document-model/src-index.esm.t +0 -5
  49. package/dist/src/codegen/.hygen/templates/powerhouse/generate-document-model/types.esm.t +0 -31
  50. package/dist/src/codegen/.hygen/templates/powerhouse/generate-document-model/utils.esm.t +0 -77
  51. package/dist/src/codegen/.hygen/templates/powerhouse/generate-document-model-module/actions.esm.t +0 -22
  52. package/dist/src/codegen/.hygen/templates/powerhouse/generate-document-model-module/creators.esm.t +0 -37
  53. package/dist/src/codegen/.hygen/templates/powerhouse/generate-document-model-module/customTest.esm.t +0 -44
  54. package/dist/src/codegen/.hygen/templates/powerhouse/generate-document-model-module/error.esm.t +0 -36
  55. package/dist/src/codegen/.hygen/templates/powerhouse/generate-document-model-module/index.js +0 -71
  56. package/dist/src/codegen/.hygen/templates/powerhouse/generate-document-model-module/operations.esm.t +0 -17
  57. package/dist/src/codegen/.hygen/templates/powerhouse/generate-document-model-subgraph/index.js +0 -118
  58. package/dist/src/codegen/.hygen/templates/powerhouse/generate-document-model-subgraph/resolvers.esm.t +0 -130
  59. package/dist/src/codegen/.hygen/templates/powerhouse/generate-document-model-subgraph/schema.esm.t +0 -44
  60. package/dist/src/codegen/.hygen/templates/powerhouse/generate-drive-editor/components/CreateDocument.esm.t +0 -58
  61. package/dist/src/codegen/.hygen/templates/powerhouse/generate-drive-editor/components/DriveContents.esm.t +0 -23
  62. package/dist/src/codegen/.hygen/templates/powerhouse/generate-drive-editor/components/DriveExplorer.esm.t +0 -32
  63. package/dist/src/codegen/.hygen/templates/powerhouse/generate-drive-editor/components/EmptyState.esm.t +0 -19
  64. package/dist/src/codegen/.hygen/templates/powerhouse/generate-drive-editor/components/Files.esm.t +0 -29
  65. package/dist/src/codegen/.hygen/templates/powerhouse/generate-drive-editor/components/FolderTree.esm.t +0 -108
  66. package/dist/src/codegen/.hygen/templates/powerhouse/generate-drive-editor/components/Folders.esm.t +0 -28
  67. package/dist/src/codegen/.hygen/templates/powerhouse/generate-drive-editor/components/NavigationBreadcrumbs.esm.t +0 -14
  68. package/dist/src/codegen/.hygen/templates/powerhouse/generate-drive-editor/config.esm.t +0 -11
  69. package/dist/src/codegen/.hygen/templates/powerhouse/generate-drive-editor/editor.esm.t +0 -20
  70. package/dist/src/codegen/.hygen/templates/powerhouse/generate-drive-editor/index.js +0 -25
  71. package/dist/src/codegen/.hygen/templates/powerhouse/generate-drive-editor/module.esm.t +0 -15
  72. package/dist/src/codegen/.hygen/templates/powerhouse/generate-editor/editor.esm.t +0 -80
  73. package/dist/src/codegen/.hygen/templates/powerhouse/generate-editor/index.js +0 -90
  74. package/dist/src/codegen/.hygen/templates/powerhouse/generate-editor/module.esm.t +0 -16
  75. package/dist/src/codegen/.hygen/templates/powerhouse/generate-import-script/index.esm.t +0 -59
  76. package/dist/src/codegen/.hygen/templates/powerhouse/generate-import-script/index.js +0 -9
  77. package/dist/src/codegen/.hygen/templates/powerhouse/generate-import-script/utils.esm.t +0 -100
  78. package/dist/src/codegen/.hygen/templates/powerhouse/generate-processor-analytics/factory.esm.t +0 -22
  79. package/dist/src/codegen/.hygen/templates/powerhouse/generate-processor-analytics/index.esm.t +0 -62
  80. package/dist/src/codegen/.hygen/templates/powerhouse/generate-processor-analytics/index.js +0 -12
  81. package/dist/src/codegen/.hygen/templates/powerhouse/generate-processor-analytics/index.root.esm.t +0 -10
  82. package/dist/src/codegen/.hygen/templates/powerhouse/generate-processor-analytics/index.root.inject.esm.t +0 -12
  83. package/dist/src/codegen/.hygen/templates/powerhouse/generate-processor-analytics/lib.esm.t +0 -34
  84. package/dist/src/codegen/.hygen/templates/powerhouse/generate-processor-analytics/lib.inject_call.esm.t +0 -7
  85. package/dist/src/codegen/.hygen/templates/powerhouse/generate-processor-analytics/lib.inject_export.esm.t +0 -7
  86. package/dist/src/codegen/.hygen/templates/powerhouse/generate-processor-analytics/lib.inject_import.esm.t +0 -7
  87. package/dist/src/codegen/.hygen/templates/powerhouse/generate-processor-relationalDb/factory.esm.t +0 -40
  88. package/dist/src/codegen/.hygen/templates/powerhouse/generate-processor-relationalDb/index.esm.t +0 -47
  89. package/dist/src/codegen/.hygen/templates/powerhouse/generate-processor-relationalDb/index.js +0 -12
  90. package/dist/src/codegen/.hygen/templates/powerhouse/generate-processor-relationalDb/index.root.esm.t +0 -10
  91. package/dist/src/codegen/.hygen/templates/powerhouse/generate-processor-relationalDb/index.root.inject.esm.t +0 -12
  92. package/dist/src/codegen/.hygen/templates/powerhouse/generate-processor-relationalDb/lib.esm.t +0 -34
  93. package/dist/src/codegen/.hygen/templates/powerhouse/generate-processor-relationalDb/lib.inject_call.esm.t +0 -7
  94. package/dist/src/codegen/.hygen/templates/powerhouse/generate-processor-relationalDb/lib.inject_export.esm.t +0 -9
  95. package/dist/src/codegen/.hygen/templates/powerhouse/generate-processor-relationalDb/lib.inject_import.esm.t +0 -7
  96. package/dist/src/codegen/.hygen/templates/powerhouse/generate-processor-relationalDb/migrations.esm.t +0 -23
  97. package/dist/src/codegen/.hygen/templates/powerhouse/generate-processor-relationalDb/schema.esm.t +0 -13
  98. package/dist/src/codegen/.hygen/templates/powerhouse/generate-subgraph/index.esm.t +0 -17
  99. package/dist/src/codegen/.hygen/templates/powerhouse/generate-subgraph/index.js +0 -10
  100. package/dist/src/codegen/.hygen/templates/powerhouse/generate-subgraph/lib.esm.t +0 -9
  101. package/dist/src/codegen/__tests__/config.d.ts +0 -3
  102. package/dist/src/codegen/__tests__/config.d.ts.map +0 -1
  103. package/dist/src/codegen/__tests__/config.js +0 -3
  104. package/dist/src/codegen/__tests__/config.js.map +0 -1
  105. package/dist/src/codegen/__tests__/constants.d.ts +0 -16
  106. package/dist/src/codegen/__tests__/constants.d.ts.map +0 -1
  107. package/dist/src/codegen/__tests__/constants.js +0 -16
  108. package/dist/src/codegen/__tests__/constants.js.map +0 -1
  109. package/dist/src/codegen/__tests__/fixtures/expected-reducer-content-v3.d.ts +0 -2
  110. package/dist/src/codegen/__tests__/fixtures/expected-reducer-content-v3.d.ts.map +0 -1
  111. package/dist/src/codegen/__tests__/fixtures/expected-reducer-content-v3.js +0 -9
  112. package/dist/src/codegen/__tests__/fixtures/expected-reducer-content-v3.js.map +0 -1
  113. package/dist/src/codegen/__tests__/fixtures/expected-reducer-content.d.ts +0 -3
  114. package/dist/src/codegen/__tests__/fixtures/expected-reducer-content.d.ts.map +0 -1
  115. package/dist/src/codegen/__tests__/fixtures/expected-reducer-content.js +0 -33
  116. package/dist/src/codegen/__tests__/fixtures/expected-reducer-content.js.map +0 -1
  117. package/dist/src/codegen/__tests__/fixtures/run-generated-tests.d.ts +0 -2
  118. package/dist/src/codegen/__tests__/fixtures/run-generated-tests.d.ts.map +0 -1
  119. package/dist/src/codegen/__tests__/fixtures/run-generated-tests.js +0 -23
  120. package/dist/src/codegen/__tests__/fixtures/run-generated-tests.js.map +0 -1
  121. package/dist/src/codegen/__tests__/fixtures/typecheck.d.ts +0 -2
  122. package/dist/src/codegen/__tests__/fixtures/typecheck.d.ts.map +0 -1
  123. package/dist/src/codegen/__tests__/fixtures/typecheck.js +0 -23
  124. package/dist/src/codegen/__tests__/fixtures/typecheck.js.map +0 -1
  125. package/dist/src/codegen/__tests__/generate-doc-model.test.d.ts +0 -2
  126. package/dist/src/codegen/__tests__/generate-doc-model.test.d.ts.map +0 -1
  127. package/dist/src/codegen/__tests__/generate-doc-model.test.js +0 -184
  128. package/dist/src/codegen/__tests__/generate-doc-model.test.js.map +0 -1
  129. package/dist/src/codegen/__tests__/generate-drive-editor.test.d.ts +0 -2
  130. package/dist/src/codegen/__tests__/generate-drive-editor.test.d.ts.map +0 -1
  131. package/dist/src/codegen/__tests__/generate-drive-editor.test.js +0 -182
  132. package/dist/src/codegen/__tests__/generate-drive-editor.test.js.map +0 -1
  133. package/dist/src/codegen/__tests__/generate-editor.test.d.ts +0 -2
  134. package/dist/src/codegen/__tests__/generate-editor.test.d.ts.map +0 -1
  135. package/dist/src/codegen/__tests__/generate-editor.test.js +0 -119
  136. package/dist/src/codegen/__tests__/generate-editor.test.js.map +0 -1
  137. package/dist/src/codegen/__tests__/generate-manifest.test.d.ts +0 -2
  138. package/dist/src/codegen/__tests__/generate-manifest.test.d.ts.map +0 -1
  139. package/dist/src/codegen/__tests__/generate-manifest.test.js +0 -193
  140. package/dist/src/codegen/__tests__/generate-manifest.test.js.map +0 -1
  141. package/dist/src/codegen/__tests__/generate-processor.test.d.ts +0 -2
  142. package/dist/src/codegen/__tests__/generate-processor.test.d.ts.map +0 -1
  143. package/dist/src/codegen/__tests__/generate-processor.test.js +0 -296
  144. package/dist/src/codegen/__tests__/generate-processor.test.js.map +0 -1
  145. package/dist/src/codegen/__tests__/global-setup.d.ts +0 -2
  146. package/dist/src/codegen/__tests__/global-setup.d.ts.map +0 -1
  147. package/dist/src/codegen/__tests__/global-setup.js +0 -9
  148. package/dist/src/codegen/__tests__/global-setup.js.map +0 -1
  149. package/dist/src/codegen/__tests__/ts-morph-generator.test.d.ts +0 -2
  150. package/dist/src/codegen/__tests__/ts-morph-generator.test.d.ts.map +0 -1
  151. package/dist/src/codegen/__tests__/ts-morph-generator.test.js +0 -73
  152. package/dist/src/codegen/__tests__/ts-morph-generator.test.js.map +0 -1
  153. package/dist/src/codegen/__tests__/utils.d.ts +0 -7
  154. package/dist/src/codegen/__tests__/utils.d.ts.map +0 -1
  155. package/dist/src/codegen/__tests__/utils.js +0 -51
  156. package/dist/src/codegen/__tests__/utils.js.map +0 -1
  157. package/dist/src/codegen/generate.d.ts +0 -92
  158. package/dist/src/codegen/generate.d.ts.map +0 -1
  159. package/dist/src/codegen/generate.js +0 -352
  160. package/dist/src/codegen/generate.js.map +0 -1
  161. package/dist/src/codegen/graphql.d.ts +0 -36
  162. package/dist/src/codegen/graphql.d.ts.map +0 -1
  163. package/dist/src/codegen/graphql.js +0 -147
  164. package/dist/src/codegen/graphql.js.map +0 -1
  165. package/dist/src/codegen/hygen.d.ts +0 -47
  166. package/dist/src/codegen/hygen.d.ts.map +0 -1
  167. package/dist/src/codegen/hygen.js +0 -230
  168. package/dist/src/codegen/hygen.js.map +0 -1
  169. package/dist/src/codegen/index.d.ts +0 -7
  170. package/dist/src/codegen/index.d.ts.map +0 -1
  171. package/dist/src/codegen/index.js +0 -7
  172. package/dist/src/codegen/index.js.map +0 -1
  173. package/dist/src/codegen/kysely.d.ts +0 -6
  174. package/dist/src/codegen/kysely.d.ts.map +0 -1
  175. package/dist/src/codegen/kysely.js +0 -51
  176. package/dist/src/codegen/kysely.js.map +0 -1
  177. package/dist/src/codegen/types.d.ts +0 -9
  178. package/dist/src/codegen/types.d.ts.map +0 -1
  179. package/dist/src/codegen/types.js +0 -2
  180. package/dist/src/codegen/types.js.map +0 -1
  181. package/dist/src/codegen/utils.d.ts +0 -7
  182. package/dist/src/codegen/utils.d.ts.map +0 -1
  183. package/dist/src/codegen/utils.js +0 -80
  184. package/dist/src/codegen/utils.js.map +0 -1
  185. package/dist/src/create-lib/checkout-project.d.ts +0 -13
  186. package/dist/src/create-lib/checkout-project.d.ts.map +0 -1
  187. package/dist/src/create-lib/checkout-project.js +0 -47
  188. package/dist/src/create-lib/checkout-project.js.map +0 -1
  189. package/dist/src/create-lib/create-project.d.ts +0 -10
  190. package/dist/src/create-lib/create-project.d.ts.map +0 -1
  191. package/dist/src/create-lib/create-project.js +0 -110
  192. package/dist/src/create-lib/create-project.js.map +0 -1
  193. package/dist/src/create-lib/feature-flags.d.ts +0 -4
  194. package/dist/src/create-lib/feature-flags.d.ts.map +0 -1
  195. package/dist/src/create-lib/feature-flags.js +0 -4
  196. package/dist/src/create-lib/feature-flags.js.map +0 -1
  197. package/dist/src/create-lib/index.d.ts +0 -3
  198. package/dist/src/create-lib/index.d.ts.map +0 -1
  199. package/dist/src/create-lib/index.js +0 -3
  200. package/dist/src/create-lib/index.js.map +0 -1
  201. package/dist/src/create-lib/utils.d.ts +0 -8
  202. package/dist/src/create-lib/utils.d.ts.map +0 -1
  203. package/dist/src/create-lib/utils.js +0 -34
  204. package/dist/src/create-lib/utils.js.map +0 -1
  205. package/dist/src/file-builders/boilerplate/build-package-json.test.d.ts +0 -2
  206. package/dist/src/file-builders/boilerplate/build-package-json.test.d.ts.map +0 -1
  207. package/dist/src/file-builders/boilerplate/build-package-json.test.js +0 -31
  208. package/dist/src/file-builders/boilerplate/build-package-json.test.js.map +0 -1
  209. package/dist/src/file-builders/boilerplate/ci-templates.test.d.ts +0 -2
  210. package/dist/src/file-builders/boilerplate/ci-templates.test.d.ts.map +0 -1
  211. package/dist/src/file-builders/boilerplate/ci-templates.test.js +0 -114
  212. package/dist/src/file-builders/boilerplate/ci-templates.test.js.map +0 -1
  213. package/dist/src/file-builders/boilerplate/package.json.d.ts +0 -6
  214. package/dist/src/file-builders/boilerplate/package.json.d.ts.map +0 -1
  215. package/dist/src/file-builders/boilerplate/package.json.js +0 -18
  216. package/dist/src/file-builders/boilerplate/package.json.js.map +0 -1
  217. package/dist/src/file-builders/clis/generate-cli-docs.d.ts +0 -17
  218. package/dist/src/file-builders/clis/generate-cli-docs.d.ts.map +0 -1
  219. package/dist/src/file-builders/clis/generate-cli-docs.js +0 -36
  220. package/dist/src/file-builders/clis/generate-cli-docs.js.map +0 -1
  221. package/dist/src/file-builders/document-editor.d.ts +0 -8
  222. package/dist/src/file-builders/document-editor.d.ts.map +0 -1
  223. package/dist/src/file-builders/document-editor.js +0 -59
  224. package/dist/src/file-builders/document-editor.js.map +0 -1
  225. package/dist/src/file-builders/document-model/document-model.d.ts +0 -8
  226. package/dist/src/file-builders/document-model/document-model.d.ts.map +0 -1
  227. package/dist/src/file-builders/document-model/document-model.js +0 -248
  228. package/dist/src/file-builders/document-model/document-model.js.map +0 -1
  229. package/dist/src/file-builders/document-model/gen-dir.d.ts +0 -3
  230. package/dist/src/file-builders/document-model/gen-dir.d.ts.map +0 -1
  231. package/dist/src/file-builders/document-model/gen-dir.js +0 -210
  232. package/dist/src/file-builders/document-model/gen-dir.js.map +0 -1
  233. package/dist/src/file-builders/document-model/migrate-legacy.d.ts +0 -19
  234. package/dist/src/file-builders/document-model/migrate-legacy.d.ts.map +0 -1
  235. package/dist/src/file-builders/document-model/migrate-legacy.js +0 -104
  236. package/dist/src/file-builders/document-model/migrate-legacy.js.map +0 -1
  237. package/dist/src/file-builders/document-model/root-dir.d.ts +0 -3
  238. package/dist/src/file-builders/document-model/root-dir.d.ts.map +0 -1
  239. package/dist/src/file-builders/document-model/root-dir.js +0 -51
  240. package/dist/src/file-builders/document-model/root-dir.js.map +0 -1
  241. package/dist/src/file-builders/document-model/src-dir.d.ts +0 -3
  242. package/dist/src/file-builders/document-model/src-dir.d.ts.map +0 -1
  243. package/dist/src/file-builders/document-model/src-dir.js +0 -177
  244. package/dist/src/file-builders/document-model/src-dir.js.map +0 -1
  245. package/dist/src/file-builders/document-model/tests-dir.d.ts +0 -3
  246. package/dist/src/file-builders/document-model/tests-dir.d.ts.map +0 -1
  247. package/dist/src/file-builders/document-model/tests-dir.js +0 -127
  248. package/dist/src/file-builders/document-model/tests-dir.js.map +0 -1
  249. package/dist/src/file-builders/document-model/upgrades-dir.d.ts +0 -33
  250. package/dist/src/file-builders/document-model/upgrades-dir.d.ts.map +0 -1
  251. package/dist/src/file-builders/document-model/upgrades-dir.js +0 -127
  252. package/dist/src/file-builders/document-model/upgrades-dir.js.map +0 -1
  253. package/dist/src/file-builders/drive-editor.d.ts +0 -9
  254. package/dist/src/file-builders/drive-editor.d.ts.map +0 -1
  255. package/dist/src/file-builders/drive-editor.js +0 -166
  256. package/dist/src/file-builders/drive-editor.js.map +0 -1
  257. package/dist/src/file-builders/editor-common.d.ts +0 -13
  258. package/dist/src/file-builders/editor-common.d.ts.map +0 -1
  259. package/dist/src/file-builders/editor-common.js +0 -25
  260. package/dist/src/file-builders/editor-common.js.map +0 -1
  261. package/dist/src/file-builders/index-files.d.ts +0 -18
  262. package/dist/src/file-builders/index-files.d.ts.map +0 -1
  263. package/dist/src/file-builders/index-files.js +0 -25
  264. package/dist/src/file-builders/index-files.js.map +0 -1
  265. package/dist/src/file-builders/index.d.ts +0 -11
  266. package/dist/src/file-builders/index.d.ts.map +0 -1
  267. package/dist/src/file-builders/index.js +0 -10
  268. package/dist/src/file-builders/index.js.map +0 -1
  269. package/dist/src/file-builders/module-files.d.ts +0 -58
  270. package/dist/src/file-builders/module-files.d.ts.map +0 -1
  271. package/dist/src/file-builders/module-files.js +0 -239
  272. package/dist/src/file-builders/module-files.js.map +0 -1
  273. package/dist/src/file-builders/processors/analytics.d.ts +0 -3
  274. package/dist/src/file-builders/processors/analytics.d.ts.map +0 -1
  275. package/dist/src/file-builders/processors/analytics.js +0 -35
  276. package/dist/src/file-builders/processors/analytics.js.map +0 -1
  277. package/dist/src/file-builders/processors/processor.d.ts +0 -9
  278. package/dist/src/file-builders/processors/processor.d.ts.map +0 -1
  279. package/dist/src/file-builders/processors/processor.js +0 -142
  280. package/dist/src/file-builders/processors/processor.js.map +0 -1
  281. package/dist/src/file-builders/processors/relational-db.d.ts +0 -3
  282. package/dist/src/file-builders/processors/relational-db.d.ts.map +0 -1
  283. package/dist/src/file-builders/processors/relational-db.js +0 -53
  284. package/dist/src/file-builders/processors/relational-db.js.map +0 -1
  285. package/dist/src/file-builders/processors/types.d.ts +0 -13
  286. package/dist/src/file-builders/processors/types.d.ts.map +0 -1
  287. package/dist/src/file-builders/processors/types.js +0 -2
  288. package/dist/src/file-builders/processors/types.js.map +0 -1
  289. package/dist/src/file-builders/subgraphs.d.ts +0 -6
  290. package/dist/src/file-builders/subgraphs.d.ts.map +0 -1
  291. package/dist/src/file-builders/subgraphs.js +0 -53
  292. package/dist/src/file-builders/subgraphs.js.map +0 -1
  293. package/dist/src/file-builders/types.d.ts +0 -85
  294. package/dist/src/file-builders/types.d.ts.map +0 -1
  295. package/dist/src/file-builders/types.js +0 -2
  296. package/dist/src/file-builders/types.js.map +0 -1
  297. package/dist/src/index.d.ts +0 -5
  298. package/dist/src/index.d.ts.map +0 -1
  299. package/dist/src/index.js +0 -5
  300. package/dist/src/index.js.map +0 -1
  301. package/dist/src/name-builders/get-action-names.d.ts +0 -6
  302. package/dist/src/name-builders/get-action-names.d.ts.map +0 -1
  303. package/dist/src/name-builders/get-action-names.js +0 -16
  304. package/dist/src/name-builders/get-action-names.js.map +0 -1
  305. package/dist/src/name-builders/get-variable-names.d.ts +0 -49
  306. package/dist/src/name-builders/get-variable-names.d.ts.map +0 -1
  307. package/dist/src/name-builders/get-variable-names.js +0 -149
  308. package/dist/src/name-builders/get-variable-names.js.map +0 -1
  309. package/dist/src/name-builders/index.d.ts +0 -3
  310. package/dist/src/name-builders/index.d.ts.map +0 -1
  311. package/dist/src/name-builders/index.js +0 -3
  312. package/dist/src/name-builders/index.js.map +0 -1
  313. package/dist/src/templates/boilerplate/AGENTS.md.d.ts +0 -2
  314. package/dist/src/templates/boilerplate/AGENTS.md.d.ts.map +0 -1
  315. package/dist/src/templates/boilerplate/AGENTS.md.js +0 -526
  316. package/dist/src/templates/boilerplate/AGENTS.md.js.map +0 -1
  317. package/dist/src/templates/boilerplate/CLAUDE.md.d.ts +0 -2
  318. package/dist/src/templates/boilerplate/CLAUDE.md.d.ts.map +0 -1
  319. package/dist/src/templates/boilerplate/CLAUDE.md.js +0 -2
  320. package/dist/src/templates/boilerplate/CLAUDE.md.js.map +0 -1
  321. package/dist/src/templates/boilerplate/LICENSE.d.ts +0 -2
  322. package/dist/src/templates/boilerplate/LICENSE.d.ts.map +0 -1
  323. package/dist/src/templates/boilerplate/LICENSE.js.map +0 -1
  324. package/dist/src/templates/boilerplate/README.md.d.ts +0 -2
  325. package/dist/src/templates/boilerplate/README.md.d.ts.map +0 -1
  326. package/dist/src/templates/boilerplate/README.md.js +0 -194
  327. package/dist/src/templates/boilerplate/README.md.js.map +0 -1
  328. package/dist/src/templates/boilerplate/claude/settings.local.json.d.ts +0 -2
  329. package/dist/src/templates/boilerplate/claude/settings.local.json.d.ts.map +0 -1
  330. package/dist/src/templates/boilerplate/claude/settings.local.json.js +0 -18
  331. package/dist/src/templates/boilerplate/claude/settings.local.json.js.map +0 -1
  332. package/dist/src/templates/boilerplate/cursor/mcp.json.d.ts +0 -2
  333. package/dist/src/templates/boilerplate/cursor/mcp.json.d.ts.map +0 -1
  334. package/dist/src/templates/boilerplate/cursor/mcp.json.js +0 -12
  335. package/dist/src/templates/boilerplate/cursor/mcp.json.js.map +0 -1
  336. package/dist/src/templates/boilerplate/docker/Dockerfile.d.ts +0 -2
  337. package/dist/src/templates/boilerplate/docker/Dockerfile.d.ts.map +0 -1
  338. package/dist/src/templates/boilerplate/docker/Dockerfile.js +0 -144
  339. package/dist/src/templates/boilerplate/docker/Dockerfile.js.map +0 -1
  340. package/dist/src/templates/boilerplate/docker/connect-entrypoint.sh.d.ts +0 -2
  341. package/dist/src/templates/boilerplate/docker/connect-entrypoint.sh.d.ts.map +0 -1
  342. package/dist/src/templates/boilerplate/docker/connect-entrypoint.sh.js +0 -18
  343. package/dist/src/templates/boilerplate/docker/connect-entrypoint.sh.js.map +0 -1
  344. package/dist/src/templates/boilerplate/docker/nginx.conf.d.ts +0 -2
  345. package/dist/src/templates/boilerplate/docker/nginx.conf.d.ts.map +0 -1
  346. package/dist/src/templates/boilerplate/docker/nginx.conf.js +0 -79
  347. package/dist/src/templates/boilerplate/docker/nginx.conf.js.map +0 -1
  348. package/dist/src/templates/boilerplate/docker/switchboard-entrypoint.sh.d.ts +0 -2
  349. package/dist/src/templates/boilerplate/docker/switchboard-entrypoint.sh.d.ts.map +0 -1
  350. package/dist/src/templates/boilerplate/docker/switchboard-entrypoint.sh.js +0 -19
  351. package/dist/src/templates/boilerplate/docker/switchboard-entrypoint.sh.js.map +0 -1
  352. package/dist/src/templates/boilerplate/document-models/document-models.d.ts +0 -2
  353. package/dist/src/templates/boilerplate/document-models/document-models.d.ts.map +0 -1
  354. package/dist/src/templates/boilerplate/document-models/document-models.js +0 -7
  355. package/dist/src/templates/boilerplate/document-models/document-models.js.map +0 -1
  356. package/dist/src/templates/boilerplate/document-models/index.d.ts +0 -2
  357. package/dist/src/templates/boilerplate/document-models/index.d.ts.map +0 -1
  358. package/dist/src/templates/boilerplate/document-models/index.js +0 -2
  359. package/dist/src/templates/boilerplate/document-models/index.js.map +0 -1
  360. package/dist/src/templates/boilerplate/document-models/upgrade-manifests.d.ts +0 -2
  361. package/dist/src/templates/boilerplate/document-models/upgrade-manifests.d.ts.map +0 -1
  362. package/dist/src/templates/boilerplate/document-models/upgrade-manifests.js +0 -7
  363. package/dist/src/templates/boilerplate/document-models/upgrade-manifests.js.map +0 -1
  364. package/dist/src/templates/boilerplate/editors/editors.d.ts +0 -2
  365. package/dist/src/templates/boilerplate/editors/editors.d.ts.map +0 -1
  366. package/dist/src/templates/boilerplate/editors/editors.js +0 -7
  367. package/dist/src/templates/boilerplate/editors/editors.js.map +0 -1
  368. package/dist/src/templates/boilerplate/editors/index.d.ts +0 -2
  369. package/dist/src/templates/boilerplate/editors/index.d.ts.map +0 -1
  370. package/dist/src/templates/boilerplate/editors/index.js +0 -2
  371. package/dist/src/templates/boilerplate/editors/index.js.map +0 -1
  372. package/dist/src/templates/boilerplate/eslint.config.js.d.ts +0 -2
  373. package/dist/src/templates/boilerplate/eslint.config.js.d.ts.map +0 -1
  374. package/dist/src/templates/boilerplate/eslint.config.js.js +0 -140
  375. package/dist/src/templates/boilerplate/eslint.config.js.js.map +0 -1
  376. package/dist/src/templates/boilerplate/gemini/settings.json.d.ts +0 -2
  377. package/dist/src/templates/boilerplate/gemini/settings.json.d.ts.map +0 -1
  378. package/dist/src/templates/boilerplate/gemini/settings.json.js +0 -12
  379. package/dist/src/templates/boilerplate/gemini/settings.json.js.map +0 -1
  380. package/dist/src/templates/boilerplate/github/sync-and-publish.yml.d.ts +0 -2
  381. package/dist/src/templates/boilerplate/github/sync-and-publish.yml.d.ts.map +0 -1
  382. package/dist/src/templates/boilerplate/github/sync-and-publish.yml.js +0 -376
  383. package/dist/src/templates/boilerplate/github/sync-and-publish.yml.js.map +0 -1
  384. package/dist/src/templates/boilerplate/gitignore.d.ts +0 -2
  385. package/dist/src/templates/boilerplate/gitignore.d.ts.map +0 -1
  386. package/dist/src/templates/boilerplate/gitignore.js +0 -11
  387. package/dist/src/templates/boilerplate/gitignore.js.map +0 -1
  388. package/dist/src/templates/boilerplate/index.d.ts +0 -2
  389. package/dist/src/templates/boilerplate/index.d.ts.map +0 -1
  390. package/dist/src/templates/boilerplate/index.html.d.ts +0 -2
  391. package/dist/src/templates/boilerplate/index.html.d.ts.map +0 -1
  392. package/dist/src/templates/boilerplate/index.html.js +0 -31
  393. package/dist/src/templates/boilerplate/index.html.js.map +0 -1
  394. package/dist/src/templates/boilerplate/index.html.legacy.d.ts +0 -2
  395. package/dist/src/templates/boilerplate/index.html.legacy.d.ts.map +0 -1
  396. package/dist/src/templates/boilerplate/index.html.legacy.js +0 -36
  397. package/dist/src/templates/boilerplate/index.html.legacy.js.map +0 -1
  398. package/dist/src/templates/boilerplate/index.js +0 -11
  399. package/dist/src/templates/boilerplate/index.js.map +0 -1
  400. package/dist/src/templates/boilerplate/mcp.json.d.ts +0 -2
  401. package/dist/src/templates/boilerplate/mcp.json.d.ts.map +0 -1
  402. package/dist/src/templates/boilerplate/mcp.json.js +0 -12
  403. package/dist/src/templates/boilerplate/mcp.json.js.map +0 -1
  404. package/dist/src/templates/boilerplate/npmrc.d.ts +0 -2
  405. package/dist/src/templates/boilerplate/npmrc.d.ts.map +0 -1
  406. package/dist/src/templates/boilerplate/npmrc.js +0 -2
  407. package/dist/src/templates/boilerplate/npmrc.js.map +0 -1
  408. package/dist/src/templates/boilerplate/package.json.d.ts +0 -2
  409. package/dist/src/templates/boilerplate/package.json.d.ts.map +0 -1
  410. package/dist/src/templates/boilerplate/package.json.js +0 -124
  411. package/dist/src/templates/boilerplate/package.json.js.map +0 -1
  412. package/dist/src/templates/boilerplate/package.json.legacy.d.ts +0 -55
  413. package/dist/src/templates/boilerplate/package.json.legacy.d.ts.map +0 -1
  414. package/dist/src/templates/boilerplate/package.json.legacy.js +0 -55
  415. package/dist/src/templates/boilerplate/package.json.legacy.js.map +0 -1
  416. package/dist/src/templates/boilerplate/powerhouse.config.json.d.ts +0 -6
  417. package/dist/src/templates/boilerplate/powerhouse.config.json.d.ts.map +0 -1
  418. package/dist/src/templates/boilerplate/powerhouse.config.json.js +0 -46
  419. package/dist/src/templates/boilerplate/powerhouse.config.json.js.map +0 -1
  420. package/dist/src/templates/boilerplate/powerhouse.manifest.json.d.ts +0 -2
  421. package/dist/src/templates/boilerplate/powerhouse.manifest.json.d.ts.map +0 -1
  422. package/dist/src/templates/boilerplate/powerhouse.manifest.json.js +0 -19
  423. package/dist/src/templates/boilerplate/powerhouse.manifest.json.js.map +0 -1
  424. package/dist/src/templates/boilerplate/style.css.d.ts +0 -2
  425. package/dist/src/templates/boilerplate/style.css.d.ts.map +0 -1
  426. package/dist/src/templates/boilerplate/style.css.js +0 -17
  427. package/dist/src/templates/boilerplate/style.css.js.map +0 -1
  428. package/dist/src/templates/boilerplate/subgraphs/index.d.ts +0 -2
  429. package/dist/src/templates/boilerplate/subgraphs/index.d.ts.map +0 -1
  430. package/dist/src/templates/boilerplate/subgraphs/index.js +0 -2
  431. package/dist/src/templates/boilerplate/subgraphs/index.js.map +0 -1
  432. package/dist/src/templates/boilerplate/tsconfig.json.d.ts +0 -2
  433. package/dist/src/templates/boilerplate/tsconfig.json.d.ts.map +0 -1
  434. package/dist/src/templates/boilerplate/tsconfig.json.js +0 -46
  435. package/dist/src/templates/boilerplate/tsconfig.json.js.map +0 -1
  436. package/dist/src/templates/boilerplate/vite.config.ts.d.ts +0 -2
  437. package/dist/src/templates/boilerplate/vite.config.ts.d.ts.map +0 -1
  438. package/dist/src/templates/boilerplate/vite.config.ts.js +0 -21
  439. package/dist/src/templates/boilerplate/vite.config.ts.js.map +0 -1
  440. package/dist/src/templates/boilerplate/vitest.config.ts.d.ts +0 -2
  441. package/dist/src/templates/boilerplate/vitest.config.ts.d.ts.map +0 -1
  442. package/dist/src/templates/boilerplate/vitest.config.ts.js +0 -21
  443. package/dist/src/templates/boilerplate/vitest.config.ts.js.map +0 -1
  444. package/dist/src/templates/cli-docs/docs-from-cli-help.d.ts +0 -8
  445. package/dist/src/templates/cli-docs/docs-from-cli-help.d.ts.map +0 -1
  446. package/dist/src/templates/cli-docs/docs-from-cli-help.js +0 -86
  447. package/dist/src/templates/cli-docs/docs-from-cli-help.js.map +0 -1
  448. package/dist/src/templates/document-editor/editor.d.ts +0 -5
  449. package/dist/src/templates/document-editor/editor.d.ts.map +0 -1
  450. package/dist/src/templates/document-editor/editor.js +0 -85
  451. package/dist/src/templates/document-editor/editor.js.map +0 -1
  452. package/dist/src/templates/document-editor/module.d.ts +0 -7
  453. package/dist/src/templates/document-editor/module.d.ts.map +0 -1
  454. package/dist/src/templates/document-editor/module.js +0 -16
  455. package/dist/src/templates/document-editor/module.js.map +0 -1
  456. package/dist/src/templates/document-model/actions.d.ts +0 -3
  457. package/dist/src/templates/document-model/actions.d.ts.map +0 -1
  458. package/dist/src/templates/document-model/actions.js +0 -28
  459. package/dist/src/templates/document-model/actions.js.map +0 -1
  460. package/dist/src/templates/document-model/gen/actions.d.ts +0 -3
  461. package/dist/src/templates/document-model/gen/actions.d.ts.map +0 -1
  462. package/dist/src/templates/document-model/gen/actions.js +0 -40
  463. package/dist/src/templates/document-model/gen/actions.js.map +0 -1
  464. package/dist/src/templates/document-model/gen/controller.d.ts +0 -3
  465. package/dist/src/templates/document-model/gen/controller.d.ts.map +0 -1
  466. package/dist/src/templates/document-model/gen/controller.js +0 -12
  467. package/dist/src/templates/document-model/gen/controller.js.map +0 -1
  468. package/dist/src/templates/document-model/gen/creators.d.ts +0 -3
  469. package/dist/src/templates/document-model/gen/creators.d.ts.map +0 -1
  470. package/dist/src/templates/document-model/gen/creators.js +0 -16
  471. package/dist/src/templates/document-model/gen/creators.js.map +0 -1
  472. package/dist/src/templates/document-model/gen/document-schema.d.ts +0 -3
  473. package/dist/src/templates/document-model/gen/document-schema.d.ts.map +0 -1
  474. package/dist/src/templates/document-model/gen/document-schema.js +0 -56
  475. package/dist/src/templates/document-model/gen/document-schema.js.map +0 -1
  476. package/dist/src/templates/document-model/gen/document-type.d.ts +0 -3
  477. package/dist/src/templates/document-model/gen/document-type.d.ts.map +0 -1
  478. package/dist/src/templates/document-model/gen/document-type.js +0 -5
  479. package/dist/src/templates/document-model/gen/document-type.js.map +0 -1
  480. package/dist/src/templates/document-model/gen/index.d.ts +0 -3
  481. package/dist/src/templates/document-model/gen/index.d.ts.map +0 -1
  482. package/dist/src/templates/document-model/gen/index.js +0 -30
  483. package/dist/src/templates/document-model/gen/index.js.map +0 -1
  484. package/dist/src/templates/document-model/gen/modules/actions.d.ts +0 -8
  485. package/dist/src/templates/document-model/gen/modules/actions.d.ts.map +0 -1
  486. package/dist/src/templates/document-model/gen/modules/actions.js +0 -39
  487. package/dist/src/templates/document-model/gen/modules/actions.js.map +0 -1
  488. package/dist/src/templates/document-model/gen/modules/creators.d.ts +0 -3
  489. package/dist/src/templates/document-model/gen/modules/creators.d.ts.map +0 -1
  490. package/dist/src/templates/document-model/gen/modules/creators.js +0 -97
  491. package/dist/src/templates/document-model/gen/modules/creators.js.map +0 -1
  492. package/dist/src/templates/document-model/gen/modules/error.d.ts +0 -3
  493. package/dist/src/templates/document-model/gen/modules/error.d.ts.map +0 -1
  494. package/dist/src/templates/document-model/gen/modules/error.js +0 -79
  495. package/dist/src/templates/document-model/gen/modules/error.js.map +0 -1
  496. package/dist/src/templates/document-model/gen/modules/operations.d.ts +0 -3
  497. package/dist/src/templates/document-model/gen/modules/operations.d.ts.map +0 -1
  498. package/dist/src/templates/document-model/gen/modules/operations.js +0 -61
  499. package/dist/src/templates/document-model/gen/modules/operations.js.map +0 -1
  500. package/dist/src/templates/document-model/gen/ph-factories.d.ts +0 -3
  501. package/dist/src/templates/document-model/gen/ph-factories.d.ts.map +0 -1
  502. package/dist/src/templates/document-model/gen/ph-factories.js +0 -93
  503. package/dist/src/templates/document-model/gen/ph-factories.js.map +0 -1
  504. package/dist/src/templates/document-model/gen/reducer.d.ts +0 -3
  505. package/dist/src/templates/document-model/gen/reducer.d.ts.map +0 -1
  506. package/dist/src/templates/document-model/gen/reducer.js +0 -106
  507. package/dist/src/templates/document-model/gen/reducer.js.map +0 -1
  508. package/dist/src/templates/document-model/gen/schema/index.d.ts +0 -2
  509. package/dist/src/templates/document-model/gen/schema/index.d.ts.map +0 -1
  510. package/dist/src/templates/document-model/gen/schema/index.js +0 -6
  511. package/dist/src/templates/document-model/gen/schema/index.js.map +0 -1
  512. package/dist/src/templates/document-model/gen/types.d.ts +0 -3
  513. package/dist/src/templates/document-model/gen/types.d.ts.map +0 -1
  514. package/dist/src/templates/document-model/gen/types.js +0 -38
  515. package/dist/src/templates/document-model/gen/types.js.map +0 -1
  516. package/dist/src/templates/document-model/gen/utils.d.ts +0 -3
  517. package/dist/src/templates/document-model/gen/utils.d.ts.map +0 -1
  518. package/dist/src/templates/document-model/gen/utils.js +0 -77
  519. package/dist/src/templates/document-model/gen/utils.js.map +0 -1
  520. package/dist/src/templates/document-model/hooks.d.ts +0 -3
  521. package/dist/src/templates/document-model/hooks.d.ts.map +0 -1
  522. package/dist/src/templates/document-model/hooks.js +0 -52
  523. package/dist/src/templates/document-model/hooks.js.map +0 -1
  524. package/dist/src/templates/document-model/index.d.ts +0 -2
  525. package/dist/src/templates/document-model/index.d.ts.map +0 -1
  526. package/dist/src/templates/document-model/index.js +0 -9
  527. package/dist/src/templates/document-model/index.js.map +0 -1
  528. package/dist/src/templates/document-model/module.d.ts +0 -8
  529. package/dist/src/templates/document-model/module.d.ts.map +0 -1
  530. package/dist/src/templates/document-model/module.js +0 -24
  531. package/dist/src/templates/document-model/module.js.map +0 -1
  532. package/dist/src/templates/document-model/src/index.d.ts +0 -2
  533. package/dist/src/templates/document-model/src/index.d.ts.map +0 -1
  534. package/dist/src/templates/document-model/src/index.js +0 -5
  535. package/dist/src/templates/document-model/src/index.js.map +0 -1
  536. package/dist/src/templates/document-model/src/utils.d.ts +0 -2
  537. package/dist/src/templates/document-model/src/utils.d.ts.map +0 -1
  538. package/dist/src/templates/document-model/src/utils.js +0 -5
  539. package/dist/src/templates/document-model/src/utils.js.map +0 -1
  540. package/dist/src/templates/document-model/tests/document-model.test.d.ts +0 -3
  541. package/dist/src/templates/document-model/tests/document-model.test.d.ts.map +0 -1
  542. package/dist/src/templates/document-model/tests/document-model.test.js +0 -121
  543. package/dist/src/templates/document-model/tests/document-model.test.js.map +0 -1
  544. package/dist/src/templates/document-model/tests/module.test.d.ts +0 -6
  545. package/dist/src/templates/document-model/tests/module.test.d.ts.map +0 -1
  546. package/dist/src/templates/document-model/tests/module.test.js +0 -87
  547. package/dist/src/templates/document-model/tests/module.test.js.map +0 -1
  548. package/dist/src/templates/document-model/upgrades/upgrade-manifest.d.ts +0 -5
  549. package/dist/src/templates/document-model/upgrades/upgrade-manifest.d.ts.map +0 -1
  550. package/dist/src/templates/document-model/upgrades/upgrade-manifest.js +0 -13
  551. package/dist/src/templates/document-model/upgrades/upgrade-manifest.js.map +0 -1
  552. package/dist/src/templates/document-model/upgrades/upgrade-transition.d.ts +0 -7
  553. package/dist/src/templates/document-model/upgrades/upgrade-transition.d.ts.map +0 -1
  554. package/dist/src/templates/document-model/upgrades/upgrade-transition.js +0 -22
  555. package/dist/src/templates/document-model/upgrades/upgrade-transition.js.map +0 -1
  556. package/dist/src/templates/document-model/utils.d.ts +0 -3
  557. package/dist/src/templates/document-model/utils.d.ts.map +0 -1
  558. package/dist/src/templates/document-model/utils.js +0 -11
  559. package/dist/src/templates/document-model/utils.js.map +0 -1
  560. package/dist/src/templates/drive-editor/components/CreateDocument.d.ts +0 -2
  561. package/dist/src/templates/drive-editor/components/CreateDocument.d.ts.map +0 -1
  562. package/dist/src/templates/drive-editor/components/CreateDocument.js +0 -58
  563. package/dist/src/templates/drive-editor/components/CreateDocument.js.map +0 -1
  564. package/dist/src/templates/drive-editor/components/DriveContents.d.ts +0 -2
  565. package/dist/src/templates/drive-editor/components/DriveContents.d.ts.map +0 -1
  566. package/dist/src/templates/drive-editor/components/DriveContents.js +0 -24
  567. package/dist/src/templates/drive-editor/components/DriveContents.js.map +0 -1
  568. package/dist/src/templates/drive-editor/components/DriveExplorer.d.ts +0 -2
  569. package/dist/src/templates/drive-editor/components/DriveExplorer.d.ts.map +0 -1
  570. package/dist/src/templates/drive-editor/components/DriveExplorer.js +0 -32
  571. package/dist/src/templates/drive-editor/components/DriveExplorer.js.map +0 -1
  572. package/dist/src/templates/drive-editor/components/EmptyState.d.ts +0 -2
  573. package/dist/src/templates/drive-editor/components/EmptyState.d.ts.map +0 -1
  574. package/dist/src/templates/drive-editor/components/EmptyState.js +0 -19
  575. package/dist/src/templates/drive-editor/components/EmptyState.js.map +0 -1
  576. package/dist/src/templates/drive-editor/components/Files.d.ts +0 -2
  577. package/dist/src/templates/drive-editor/components/Files.d.ts.map +0 -1
  578. package/dist/src/templates/drive-editor/components/Files.js +0 -30
  579. package/dist/src/templates/drive-editor/components/Files.js.map +0 -1
  580. package/dist/src/templates/drive-editor/components/FolderTree.d.ts +0 -2
  581. package/dist/src/templates/drive-editor/components/FolderTree.d.ts.map +0 -1
  582. package/dist/src/templates/drive-editor/components/FolderTree.js +0 -108
  583. package/dist/src/templates/drive-editor/components/FolderTree.js.map +0 -1
  584. package/dist/src/templates/drive-editor/components/Folders.d.ts +0 -2
  585. package/dist/src/templates/drive-editor/components/Folders.d.ts.map +0 -1
  586. package/dist/src/templates/drive-editor/components/Folders.js +0 -28
  587. package/dist/src/templates/drive-editor/components/Folders.js.map +0 -1
  588. package/dist/src/templates/drive-editor/components/NavigationBreadcrumbs.d.ts +0 -2
  589. package/dist/src/templates/drive-editor/components/NavigationBreadcrumbs.d.ts.map +0 -1
  590. package/dist/src/templates/drive-editor/components/NavigationBreadcrumbs.js +0 -14
  591. package/dist/src/templates/drive-editor/components/NavigationBreadcrumbs.js.map +0 -1
  592. package/dist/src/templates/drive-editor/config.d.ts +0 -5
  593. package/dist/src/templates/drive-editor/config.d.ts.map +0 -1
  594. package/dist/src/templates/drive-editor/config.js +0 -11
  595. package/dist/src/templates/drive-editor/config.js.map +0 -1
  596. package/dist/src/templates/drive-editor/editor.d.ts +0 -2
  597. package/dist/src/templates/drive-editor/editor.d.ts.map +0 -1
  598. package/dist/src/templates/drive-editor/editor.js +0 -20
  599. package/dist/src/templates/drive-editor/editor.js.map +0 -1
  600. package/dist/src/templates/index.d.ts +0 -80
  601. package/dist/src/templates/index.d.ts.map +0 -1
  602. package/dist/src/templates/index.js +0 -80
  603. package/dist/src/templates/index.js.map +0 -1
  604. package/dist/src/templates/processors/analytics/factory.d.ts +0 -6
  605. package/dist/src/templates/processors/analytics/factory.d.ts.map +0 -1
  606. package/dist/src/templates/processors/analytics/factory.js +0 -26
  607. package/dist/src/templates/processors/analytics/factory.js.map +0 -1
  608. package/dist/src/templates/processors/analytics/index.d.ts +0 -4
  609. package/dist/src/templates/processors/analytics/index.d.ts.map +0 -1
  610. package/dist/src/templates/processors/analytics/index.js +0 -34
  611. package/dist/src/templates/processors/analytics/index.js.map +0 -1
  612. package/dist/src/templates/processors/factory.d.ts +0 -2
  613. package/dist/src/templates/processors/factory.d.ts.map +0 -1
  614. package/dist/src/templates/processors/factory.js +0 -60
  615. package/dist/src/templates/processors/factory.js.map +0 -1
  616. package/dist/src/templates/processors/index.d.ts +0 -2
  617. package/dist/src/templates/processors/index.d.ts.map +0 -1
  618. package/dist/src/templates/processors/index.js +0 -10
  619. package/dist/src/templates/processors/index.js.map +0 -1
  620. package/dist/src/templates/processors/relational-db/factory.d.ts +0 -6
  621. package/dist/src/templates/processors/relational-db/factory.d.ts.map +0 -1
  622. package/dist/src/templates/processors/relational-db/factory.js +0 -40
  623. package/dist/src/templates/processors/relational-db/factory.js.map +0 -1
  624. package/dist/src/templates/processors/relational-db/index.d.ts +0 -4
  625. package/dist/src/templates/processors/relational-db/index.d.ts.map +0 -1
  626. package/dist/src/templates/processors/relational-db/index.js +0 -28
  627. package/dist/src/templates/processors/relational-db/index.js.map +0 -1
  628. package/dist/src/templates/processors/relational-db/migrations.d.ts +0 -2
  629. package/dist/src/templates/processors/relational-db/migrations.d.ts.map +0 -1
  630. package/dist/src/templates/processors/relational-db/migrations.js +0 -23
  631. package/dist/src/templates/processors/relational-db/migrations.js.map +0 -1
  632. package/dist/src/templates/processors/relational-db/schema.d.ts +0 -2
  633. package/dist/src/templates/processors/relational-db/schema.d.ts.map +0 -1
  634. package/dist/src/templates/processors/relational-db/schema.js +0 -12
  635. package/dist/src/templates/processors/relational-db/schema.js.map +0 -1
  636. package/dist/src/templates/processors/utils.d.ts +0 -2
  637. package/dist/src/templates/processors/utils.d.ts.map +0 -1
  638. package/dist/src/templates/processors/utils.js +0 -6
  639. package/dist/src/templates/processors/utils.js.map +0 -1
  640. package/dist/src/ts-morph-generator/__tests__/ReducerGenerator.test.d.ts +0 -2
  641. package/dist/src/ts-morph-generator/__tests__/ReducerGenerator.test.d.ts.map +0 -1
  642. package/dist/src/ts-morph-generator/__tests__/ReducerGenerator.test.js +0 -491
  643. package/dist/src/ts-morph-generator/__tests__/ReducerGenerator.test.js.map +0 -1
  644. package/dist/src/ts-morph-generator/core/FileGenerator.d.ts +0 -11
  645. package/dist/src/ts-morph-generator/core/FileGenerator.d.ts.map +0 -1
  646. package/dist/src/ts-morph-generator/core/FileGenerator.js +0 -11
  647. package/dist/src/ts-morph-generator/core/FileGenerator.js.map +0 -1
  648. package/dist/src/ts-morph-generator/core/GenerationContext.d.ts +0 -39
  649. package/dist/src/ts-morph-generator/core/GenerationContext.d.ts.map +0 -1
  650. package/dist/src/ts-morph-generator/core/GenerationContext.js +0 -2
  651. package/dist/src/ts-morph-generator/core/GenerationContext.js.map +0 -1
  652. package/dist/src/ts-morph-generator/core/ReducerGenerator.d.ts +0 -12
  653. package/dist/src/ts-morph-generator/core/ReducerGenerator.d.ts.map +0 -1
  654. package/dist/src/ts-morph-generator/core/ReducerGenerator.js +0 -147
  655. package/dist/src/ts-morph-generator/core/ReducerGenerator.js.map +0 -1
  656. package/dist/src/ts-morph-generator/core/TSMorphCodeGenerator.d.ts +0 -20
  657. package/dist/src/ts-morph-generator/core/TSMorphCodeGenerator.d.ts.map +0 -1
  658. package/dist/src/ts-morph-generator/core/TSMorphCodeGenerator.js +0 -120
  659. package/dist/src/ts-morph-generator/core/TSMorphCodeGenerator.js.map +0 -1
  660. package/dist/src/ts-morph-generator/index.d.ts +0 -4
  661. package/dist/src/ts-morph-generator/index.d.ts.map +0 -1
  662. package/dist/src/ts-morph-generator/index.js +0 -4
  663. package/dist/src/ts-morph-generator/index.js.map +0 -1
  664. package/dist/src/ts-morph-generator/utilities/DeclarationManager.d.ts +0 -5
  665. package/dist/src/ts-morph-generator/utilities/DeclarationManager.d.ts.map +0 -1
  666. package/dist/src/ts-morph-generator/utilities/DeclarationManager.js +0 -10
  667. package/dist/src/ts-morph-generator/utilities/DeclarationManager.js.map +0 -1
  668. package/dist/src/ts-morph-generator/utilities/DirectoryManager.d.ts +0 -13
  669. package/dist/src/ts-morph-generator/utilities/DirectoryManager.d.ts.map +0 -1
  670. package/dist/src/ts-morph-generator/utilities/DirectoryManager.js +0 -45
  671. package/dist/src/ts-morph-generator/utilities/DirectoryManager.js.map +0 -1
  672. package/dist/src/ts-morph-generator/utilities/ImportManager.d.ts +0 -15
  673. package/dist/src/ts-morph-generator/utilities/ImportManager.d.ts.map +0 -1
  674. package/dist/src/ts-morph-generator/utilities/ImportManager.js +0 -50
  675. package/dist/src/ts-morph-generator/utilities/ImportManager.js.map +0 -1
  676. package/dist/src/ts-morph-generator/utilities/index.d.ts +0 -4
  677. package/dist/src/ts-morph-generator/utilities/index.d.ts.map +0 -1
  678. package/dist/src/ts-morph-generator/utilities/index.js +0 -4
  679. package/dist/src/ts-morph-generator/utilities/index.js.map +0 -1
  680. package/dist/src/utils/cli.d.ts +0 -26
  681. package/dist/src/utils/cli.d.ts.map +0 -1
  682. package/dist/src/utils/cli.js +0 -57
  683. package/dist/src/utils/cli.js.map +0 -1
  684. package/dist/src/utils/constants.d.ts +0 -11
  685. package/dist/src/utils/constants.d.ts.map +0 -1
  686. package/dist/src/utils/constants.js +0 -11
  687. package/dist/src/utils/constants.js.map +0 -1
  688. package/dist/src/utils/document-type-metadata.d.ts +0 -15
  689. package/dist/src/utils/document-type-metadata.d.ts.map +0 -1
  690. package/dist/src/utils/document-type-metadata.js +0 -70
  691. package/dist/src/utils/document-type-metadata.js.map +0 -1
  692. package/dist/src/utils/format-with-prettier.d.ts +0 -5
  693. package/dist/src/utils/format-with-prettier.d.ts.map +0 -1
  694. package/dist/src/utils/format-with-prettier.js +0 -14
  695. package/dist/src/utils/format-with-prettier.js.map +0 -1
  696. package/dist/src/utils/index.d.ts +0 -12
  697. package/dist/src/utils/index.d.ts.map +0 -1
  698. package/dist/src/utils/index.js +0 -12
  699. package/dist/src/utils/index.js.map +0 -1
  700. package/dist/src/utils/mock.d.ts +0 -3
  701. package/dist/src/utils/mock.d.ts.map +0 -1
  702. package/dist/src/utils/mock.js +0 -5
  703. package/dist/src/utils/mock.js.map +0 -1
  704. package/dist/src/utils/source-files.d.ts +0 -25
  705. package/dist/src/utils/source-files.d.ts.map +0 -1
  706. package/dist/src/utils/source-files.js +0 -56
  707. package/dist/src/utils/source-files.js.map +0 -1
  708. package/dist/src/utils/syntax-builders.d.ts +0 -8
  709. package/dist/src/utils/syntax-builders.d.ts.map +0 -1
  710. package/dist/src/utils/syntax-builders.js +0 -72
  711. package/dist/src/utils/syntax-builders.js.map +0 -1
  712. package/dist/src/utils/syntax-getters.d.ts +0 -10
  713. package/dist/src/utils/syntax-getters.d.ts.map +0 -1
  714. package/dist/src/utils/syntax-getters.js +0 -24
  715. package/dist/src/utils/syntax-getters.js.map +0 -1
  716. package/dist/src/utils/ts-morph-project.d.ts +0 -24
  717. package/dist/src/utils/ts-morph-project.d.ts.map +0 -1
  718. package/dist/src/utils/ts-morph-project.js +0 -28
  719. package/dist/src/utils/ts-morph-project.js.map +0 -1
  720. package/dist/src/utils/unsafe-utils.d.ts +0 -8
  721. package/dist/src/utils/unsafe-utils.d.ts.map +0 -1
  722. package/dist/src/utils/unsafe-utils.js +0 -23
  723. package/dist/src/utils/unsafe-utils.js.map +0 -1
  724. package/dist/src/utils/validation.d.ts +0 -18
  725. package/dist/src/utils/validation.d.ts.map +0 -1
  726. package/dist/src/utils/validation.js +0 -138
  727. package/dist/src/utils/validation.js.map +0 -1
  728. package/dist/tsconfig.tsbuildinfo +0 -1
  729. package/dist/vitest.config.d.ts +0 -3
  730. package/dist/vitest.config.d.ts.map +0 -1
  731. package/dist/vitest.config.js +0 -21
  732. package/dist/vitest.config.js.map +0 -1
@@ -0,0 +1 @@
1
+ {"version":3,"file":"file-builders-BraThjto.mjs","names":["map","merge","map","merge","#length","#scanner","#createError","ts","YAML.parse","fs","claudeTemplate","path","join","path","makeIndexFile","makeProcessorFile","makeFactoryFile"],"sources":["../../../node_modules/.pnpm/@jsr+std__yaml@1.0.12/node_modules/@jsr/std__yaml/_chars.js","../../../node_modules/.pnpm/@jsr+std__yaml@1.0.12/node_modules/@jsr/std__yaml/_type/binary.js","../../../node_modules/.pnpm/@jsr+std__yaml@1.0.12/node_modules/@jsr/std__yaml/_type/bool.js","../../../node_modules/.pnpm/@jsr+std__yaml@1.0.12/node_modules/@jsr/std__yaml/_utils.js","../../../node_modules/.pnpm/@jsr+std__yaml@1.0.12/node_modules/@jsr/std__yaml/_type/float.js","../../../node_modules/.pnpm/@jsr+std__yaml@1.0.12/node_modules/@jsr/std__yaml/_type/int.js","../../../node_modules/.pnpm/@jsr+std__yaml@1.0.12/node_modules/@jsr/std__yaml/_type/map.js","../../../node_modules/.pnpm/@jsr+std__yaml@1.0.12/node_modules/@jsr/std__yaml/_type/merge.js","../../../node_modules/.pnpm/@jsr+std__yaml@1.0.12/node_modules/@jsr/std__yaml/_type/nil.js","../../../node_modules/.pnpm/@jsr+std__yaml@1.0.12/node_modules/@jsr/std__yaml/_type/omap.js","../../../node_modules/.pnpm/@jsr+std__yaml@1.0.12/node_modules/@jsr/std__yaml/_type/pairs.js","../../../node_modules/.pnpm/@jsr+std__yaml@1.0.12/node_modules/@jsr/std__yaml/_type/regexp.js","../../../node_modules/.pnpm/@jsr+std__yaml@1.0.12/node_modules/@jsr/std__yaml/_type/seq.js","../../../node_modules/.pnpm/@jsr+std__yaml@1.0.12/node_modules/@jsr/std__yaml/_type/set.js","../../../node_modules/.pnpm/@jsr+std__yaml@1.0.12/node_modules/@jsr/std__yaml/_type/str.js","../../../node_modules/.pnpm/@jsr+std__yaml@1.0.12/node_modules/@jsr/std__yaml/_type/timestamp.js","../../../node_modules/.pnpm/@jsr+std__yaml@1.0.12/node_modules/@jsr/std__yaml/_type/undefined.js","../../../node_modules/.pnpm/@jsr+std__yaml@1.0.12/node_modules/@jsr/std__yaml/_schema.js","../../../node_modules/.pnpm/@jsr+std__yaml@1.0.12/node_m
odules/@jsr/std__yaml/_loader_state.js","../../../node_modules/.pnpm/@jsr+std__yaml@1.0.12/node_modules/@jsr/std__yaml/parse.js","../../../node_modules/.pnpm/@jsr+tmpl__core@0.6.3/node_modules/@jsr/tmpl__core/mod.js","../src/templates/app/components/CreateDocument.ts","../src/templates/app/components/DriveContents.ts","../src/templates/app/components/DriveExplorer.ts","../src/templates/app/components/EmptyState.ts","../src/templates/app/components/Files.ts","../src/templates/app/components/Folders.ts","../src/templates/app/components/FolderTree.ts","../src/templates/app/components/NavigationBreadcrumbs.ts","../src/templates/app/config.ts","../src/templates/app/editor.ts","../src/templates/boilerplate/AGENTS.md.ts","../src/templates/boilerplate/claude/settings.local.json.ts","../src/templates/boilerplate/cursor/mcp.json.ts","../src/templates/boilerplate/docker/connect-entrypoint.sh.ts","../src/templates/boilerplate/docker/Dockerfile.ts","../src/templates/boilerplate/docker/nginx.conf.ts","../src/templates/boilerplate/docker/switchboard-entrypoint.sh.ts","../src/templates/boilerplate/document-models/document-models.ts","../src/templates/boilerplate/document-models/index.ts","../src/templates/boilerplate/document-models/upgrade-manifests.ts","../src/templates/boilerplate/editors/editors.ts","../src/templates/boilerplate/editors/index.ts","../src/templates/boilerplate/eslint.config.js.ts","../src/templates/boilerplate/gemini/settings.json.ts","../src/templates/boilerplate/github/sync-and-publish.yml.ts","../src/templates/boilerplate/gitignore.ts","../src/templates/boilerplate/index.html.ts","../src/templates/boilerplate/index.html.legacy.ts","../src/templates/boilerplate/index.ts","../src/templates/boilerplate/LICENSE.ts","../src/templates/boilerplate/main.tsx.ts","../src/templates/boilerplate/mcp.json.ts","../src/templates/boilerplate/npmrc.ts","../src/file-builders/constants.ts","../src/templates/boilerplate/package.json.ts","../src/templates/boilerplate/package.json.
legacy.ts","../src/templates/boilerplate/powerhouse.config.json.ts","../src/templates/boilerplate/powerhouse.manifest.json.ts","../src/templates/boilerplate/README.md.ts","../src/templates/boilerplate/style.css.ts","../src/templates/boilerplate/subgraphs/index.ts","../src/templates/boilerplate/tsconfig.json.ts","../src/templates/boilerplate/vitest.config.ts.ts","../src/templates/cli-docs/docs-from-cli-help.ts","../src/templates/document-editor/editor.ts","../src/templates/document-editor/module.ts","../src/templates/document-model/actions.ts","../src/templates/document-model/gen/actions.ts","../src/templates/document-model/gen/controller.ts","../src/templates/document-model/gen/creators.ts","../src/templates/document-model/gen/document-schema.ts","../src/templates/document-model/gen/document-type.ts","../src/templates/document-model/gen/index.ts","../src/name-builders/get-action-names.ts","../src/name-builders/get-variable-names.ts","../src/templates/document-model/gen/modules/actions.ts","../src/templates/document-model/gen/modules/creators.ts","../src/templates/document-model/gen/modules/error.ts","../src/templates/document-model/gen/modules/operations.ts","../src/templates/document-model/gen/ph-factories.ts","../src/templates/document-model/gen/reducer.ts","../src/templates/document-model/gen/schema/index.ts","../src/templates/document-model/gen/types.ts","../src/templates/document-model/gen/utils.ts","../src/templates/document-model/hooks.ts","../src/templates/document-model/index.ts","../src/templates/document-model/module.ts","../src/templates/document-model/src/index.ts","../src/templates/document-model/src/utils.ts","../src/templates/document-model/tests/document-model.test.ts","../src/templates/document-model/tests/module.test.ts","../src/templates/document-model/upgrades/upgrade-manifest.ts","../src/templates/document-model/upgrades/upgrade-transition.ts","../src/templates/document-model/utils.ts","../src/templates/processors/utils.ts","../src/templates/pr
ocessors/analytics/factory.ts","../src/templates/processors/analytics/index.ts","../src/templates/processors/analytics/processor.ts","../src/templates/processors/factory-builders.ts","../src/templates/processors/factory.ts","../src/templates/processors/index.ts","../src/templates/processors/relational-db/factory.ts","../src/templates/processors/relational-db/index.ts","../src/templates/processors/relational-db/migrations.ts","../src/templates/processors/relational-db/processor.ts","../src/templates/processors/relational-db/schema.ts","../src/templates/subgraphs/index-file.ts","../src/templates/subgraphs/lib-file.ts","../src/templates/subgraphs/custom-schema.ts","../src/templates/subgraphs/custom-resolvers.ts","../src/utils/cli.ts","../src/utils/constants.ts","../src/utils/document-type-metadata.ts","../src/utils/format-with-prettier.ts","../src/utils/get-editor-metadata.ts","../src/utils/get-processor-metadata.ts","../src/utils/get-subgraph-metadata.ts","../src/utils/source-files.ts","../src/utils/syntax-builders.ts","../src/utils/syntax-getters.ts","../src/utils/ts-morph-project.ts","../src/utils/unsafe-utils.ts","../src/utils/validation.ts","../src/file-builders/editor-common.ts","../src/file-builders/app.ts","../src/file-builders/boilerplate/generated-project-files.ts","../src/file-builders/boilerplate/package.json.ts","../src/file-builders/clis/generate-cli-docs.ts","../src/file-builders/document-editor.ts","../src/codegen/graphql.ts","../src/file-builders/document-model/gen-dir.ts","../src/file-builders/document-model/root-dir.ts","../src/file-builders/document-model/src-dir.ts","../src/file-builders/document-model/tests-dir.ts","../src/file-builders/document-model/upgrades-dir.ts","../src/file-builders/document-model/document-model.ts","../src/file-builders/document-model/utils.ts","../src/file-builders/index-files.ts","../src/file-builders/manifest.ts","../src/file-builders/processors/analytics.ts","../src/file-builders/processors/relational-db.ts","../src/fi
le-builders/processors/processor.ts","../src/file-builders/subgraphs.ts"],"sourcesContent":["// Ported from js-yaml v3.13.1:\n// https://github.com/nodeca/js-yaml/commit/665aadda42349dcae869f12040d9b10ef18d12da\n// Copyright 2011-2015 by Vitaly Puzrin. All rights reserved. MIT license.\n// Copyright 2018-2026 the Deno authors. MIT license.\nexport const BOM = 0xfeff; /* BOM */ \nexport const TAB = 0x09; /* Tab */ \nexport const LINE_FEED = 0x0a; /* LF */ \nexport const CARRIAGE_RETURN = 0x0d; /* CR */ \nexport const SPACE = 0x20; /* Space */ \nexport const EXCLAMATION = 0x21; /* ! */ \nexport const DOUBLE_QUOTE = 0x22; /* \" */ \nexport const SHARP = 0x23; /* # */ \nexport const PERCENT = 0x25; /* % */ \nexport const AMPERSAND = 0x26; /* & */ \nexport const SINGLE_QUOTE = 0x27; /* ' */ \nexport const ASTERISK = 0x2a; /* * */ \nexport const PLUS = 0x2b; /* + */ \nexport const COMMA = 0x2c; /* , */ \nexport const MINUS = 0x2d; /* - */ \nexport const DOT = 0x2e; /* . */ \nexport const COLON = 0x3a; /* : */ \nexport const SMALLER_THAN = 0x3c; /* < */ \nexport const GREATER_THAN = 0x3e; /* > */ \nexport const QUESTION = 0x3f; /* ? 
*/ \nexport const COMMERCIAL_AT = 0x40; /* @ */ \nexport const LEFT_SQUARE_BRACKET = 0x5b; /* [ */ \nexport const BACKSLASH = 0x5c; /* \\ */ \nexport const RIGHT_SQUARE_BRACKET = 0x5d; /* ] */ \nexport const GRAVE_ACCENT = 0x60; /* ` */ \nexport const LEFT_CURLY_BRACKET = 0x7b; /* { */ \nexport const VERTICAL_LINE = 0x7c; /* | */ \nexport const RIGHT_CURLY_BRACKET = 0x7d; /* } */ \nexport function isEOL(c) {\n return c === LINE_FEED || c === CARRIAGE_RETURN;\n}\nexport function isWhiteSpace(c) {\n return c === TAB || c === SPACE;\n}\nexport function isWhiteSpaceOrEOL(c) {\n return isWhiteSpace(c) || isEOL(c);\n}\nexport function isFlowIndicator(c) {\n return c === COMMA || c === LEFT_SQUARE_BRACKET || c === RIGHT_SQUARE_BRACKET || c === LEFT_CURLY_BRACKET || c === RIGHT_CURLY_BRACKET;\n}\n//# sourceMappingURL=_chars.js.map","// Ported from js-yaml v3.13.1:\n// Copyright 2011-2015 by Vitaly Puzrin. All rights reserved. MIT license.\n// https://github.com/nodeca/js-yaml/commit/665aadda42349dcae869f12040d9b10ef18d12da\n// Copyright 2018-2026 the Deno authors. 
MIT license.\n// [ 64, 65, 66 ] -> [ padding, CR, LF ]\nconst BASE64_MAP = \"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=\\n\\r\";\nfunction resolveYamlBinary(data) {\n if (data === null) return false;\n let code;\n let bitlen = 0;\n const max = data.length;\n const map = BASE64_MAP;\n // Convert one by one.\n for(let idx = 0; idx < max; idx++){\n code = map.indexOf(data.charAt(idx));\n // Skip CR/LF\n if (code > 64) continue;\n // Fail on illegal characters\n if (code < 0) return false;\n bitlen += 6;\n }\n // If there are any bits left, source was corrupted\n return bitlen % 8 === 0;\n}\nfunction constructYamlBinary(data) {\n // remove CR/LF & padding to simplify scan\n const input = data.replace(/[\\r\\n=]/g, \"\");\n const max = input.length;\n const map = BASE64_MAP;\n // Collect by 6*4 bits (3 bytes)\n const result = [];\n let bits = 0;\n for(let idx = 0; idx < max; idx++){\n if (idx % 4 === 0 && idx) {\n result.push(bits >> 16 & 0xff);\n result.push(bits >> 8 & 0xff);\n result.push(bits & 0xff);\n }\n bits = bits << 6 | map.indexOf(input.charAt(idx));\n }\n // Dump tail\n const tailbits = max % 4 * 6;\n if (tailbits === 0) {\n result.push(bits >> 16 & 0xff);\n result.push(bits >> 8 & 0xff);\n result.push(bits & 0xff);\n } else if (tailbits === 18) {\n result.push(bits >> 10 & 0xff);\n result.push(bits >> 2 & 0xff);\n } else if (tailbits === 12) {\n result.push(bits >> 4 & 0xff);\n }\n return new Uint8Array(result);\n}\nfunction representYamlBinary(object) {\n const max = object.length;\n const map = BASE64_MAP;\n // Convert every three bytes to 4 ASCII characters.\n let result = \"\";\n let bits = 0;\n for(let idx = 0; idx < max; idx++){\n if (idx % 3 === 0 && idx) {\n result += map[bits >> 18 & 0x3f];\n result += map[bits >> 12 & 0x3f];\n result += map[bits >> 6 & 0x3f];\n result += map[bits & 0x3f];\n }\n bits = (bits << 8) + object[idx];\n }\n // Dump tail\n const tail = max % 3;\n if (tail === 0) {\n result += map[bits >> 18 & 
0x3f];\n result += map[bits >> 12 & 0x3f];\n result += map[bits >> 6 & 0x3f];\n result += map[bits & 0x3f];\n } else if (tail === 2) {\n result += map[bits >> 10 & 0x3f];\n result += map[bits >> 4 & 0x3f];\n result += map[bits << 2 & 0x3f];\n result += map[64];\n } else if (tail === 1) {\n result += map[bits >> 2 & 0x3f];\n result += map[bits << 4 & 0x3f];\n result += map[64];\n result += map[64];\n }\n return result;\n}\nfunction isBinary(obj) {\n return obj instanceof Uint8Array;\n}\nexport const binary = {\n tag: \"tag:yaml.org,2002:binary\",\n construct: constructYamlBinary,\n kind: \"scalar\",\n predicate: isBinary,\n represent: representYamlBinary,\n resolve: resolveYamlBinary\n};\n//# sourceMappingURL=binary.js.map","// Ported from js-yaml v3.13.1:\n// https://github.com/nodeca/js-yaml/commit/665aadda42349dcae869f12040d9b10ef18d12da\n// Copyright 2011-2015 by Vitaly Puzrin. All rights reserved. MIT license.\n// Copyright 2018-2026 the Deno authors. MIT license.\nconst YAML_TRUE_BOOLEANS = [\n \"true\",\n \"True\",\n \"TRUE\"\n];\nconst YAML_FALSE_BOOLEANS = [\n \"false\",\n \"False\",\n \"FALSE\"\n];\nconst YAML_BOOLEANS = [\n ...YAML_TRUE_BOOLEANS,\n ...YAML_FALSE_BOOLEANS\n];\nexport const bool = {\n tag: \"tag:yaml.org,2002:bool\",\n kind: \"scalar\",\n defaultStyle: \"lowercase\",\n predicate: (value)=>typeof value === \"boolean\" || value instanceof Boolean,\n construct: (data)=>YAML_TRUE_BOOLEANS.includes(data),\n resolve: (data)=>YAML_BOOLEANS.includes(data),\n represent: {\n // deno-lint-ignore ban-types\n lowercase: (object)=>{\n const value = object instanceof Boolean ? object.valueOf() : object;\n return value ? \"true\" : \"false\";\n },\n // deno-lint-ignore ban-types\n uppercase: (object)=>{\n const value = object instanceof Boolean ? object.valueOf() : object;\n return value ? \"TRUE\" : \"FALSE\";\n },\n // deno-lint-ignore ban-types\n camelcase: (object)=>{\n const value = object instanceof Boolean ? 
object.valueOf() : object;\n return value ? \"True\" : \"False\";\n }\n }\n};\n//# sourceMappingURL=bool.js.map","// Ported from js-yaml v3.13.1:\n// https://github.com/nodeca/js-yaml/commit/665aadda42349dcae869f12040d9b10ef18d12da\n// Copyright 2011-2015 by Vitaly Puzrin. All rights reserved. MIT license.\n// Copyright 2018-2026 the Deno authors. MIT license.\nexport function isObject(value) {\n return value !== null && typeof value === \"object\";\n}\nexport function isNegativeZero(i) {\n return i === 0 && Number.NEGATIVE_INFINITY === 1 / i;\n}\nexport function isPlainObject(object) {\n return Object.prototype.toString.call(object) === \"[object Object]\";\n}\n//# sourceMappingURL=_utils.js.map","// Ported from js-yaml v3.13.1:\n// https://github.com/nodeca/js-yaml/commit/665aadda42349dcae869f12040d9b10ef18d12da\n// Copyright 2011-2015 by Vitaly Puzrin. All rights reserved. MIT license.\n// Copyright 2018-2026 the Deno authors. MIT license.\nimport { isNegativeZero } from \"../_utils.js\";\nconst YAML_FLOAT_PATTERN = new RegExp(// 2.5e4, 2.5 and integers\n\"^(?:[-+]?(?:0|[1-9][0-9_]*)(?:\\\\.[0-9_]*)?(?:[eE][-+]?[0-9]+)?\" + // .2e4, .2\n// special case, seems not from spec\n\"|\\\\.[0-9_]+(?:[eE][-+]?[0-9]+)?\" + // .inf\n\"|[-+]?\\\\.(?:inf|Inf|INF)\" + // .nan\n\"|\\\\.(?:nan|NaN|NAN))$\");\nfunction resolveYamlFloat(data) {\n if (!YAML_FLOAT_PATTERN.test(data) || // Quick hack to not allow integers end with `_`\n // Probably should update regexp & check speed\n data[data.length - 1] === \"_\") {\n return false;\n }\n return true;\n}\nfunction constructYamlFloat(data) {\n let value = data.replace(/_/g, \"\").toLowerCase();\n const sign = value[0] === \"-\" ? -1 : 1;\n if (value[0] && \"+-\".includes(value[0])) {\n value = value.slice(1);\n }\n if (value === \".inf\") {\n return sign === 1 ? 
Number.POSITIVE_INFINITY : Number.NEGATIVE_INFINITY;\n }\n if (value === \".nan\") {\n return NaN;\n }\n return sign * parseFloat(value);\n}\nconst SCIENTIFIC_WITHOUT_DOT = /^[-+]?[0-9]+e/;\nfunction representYamlFloat(// deno-lint-ignore ban-types\nobject, style) {\n const value = object instanceof Number ? object.valueOf() : object;\n if (isNaN(value)) {\n switch(style){\n case \"lowercase\":\n return \".nan\";\n case \"uppercase\":\n return \".NAN\";\n case \"camelcase\":\n return \".NaN\";\n }\n } else if (Number.POSITIVE_INFINITY === value) {\n switch(style){\n case \"lowercase\":\n return \".inf\";\n case \"uppercase\":\n return \".INF\";\n case \"camelcase\":\n return \".Inf\";\n }\n } else if (Number.NEGATIVE_INFINITY === value) {\n switch(style){\n case \"lowercase\":\n return \"-.inf\";\n case \"uppercase\":\n return \"-.INF\";\n case \"camelcase\":\n return \"-.Inf\";\n }\n } else if (isNegativeZero(value)) {\n return \"-0.0\";\n }\n const res = value.toString(10);\n // JS stringifier can build scientific format without dots: 5e-100,\n // while YAML requires dot: 5.e-100. Fix it with simple hack\n return SCIENTIFIC_WITHOUT_DOT.test(res) ? res.replace(\"e\", \".e\") : res;\n}\nfunction isFloat(object) {\n if (object instanceof Number) object = object.valueOf();\n return typeof object === \"number\" && (object % 1 !== 0 || isNegativeZero(object));\n}\nexport const float = {\n tag: \"tag:yaml.org,2002:float\",\n construct: constructYamlFloat,\n defaultStyle: \"lowercase\",\n kind: \"scalar\",\n predicate: isFloat,\n represent: representYamlFloat,\n resolve: resolveYamlFloat\n};\n//# sourceMappingURL=float.js.map","// Ported from js-yaml v3.13.1:\n// https://github.com/nodeca/js-yaml/commit/665aadda42349dcae869f12040d9b10ef18d12da\n// Copyright 2011-2015 by Vitaly Puzrin. All rights reserved. MIT license.\n// Copyright 2018-2026 the Deno authors. 
MIT license.\nimport { isNegativeZero } from \"../_utils.js\";\nfunction isCharCodeInRange(c, lower, upper) {\n return lower <= c && c <= upper;\n}\nfunction isHexCode(c) {\n return isCharCodeInRange(c, 0x30, 0x39) || // 0-9\n isCharCodeInRange(c, 0x41, 0x46) || // A-F\n isCharCodeInRange(c, 0x61, 0x66) // a-f\n ;\n}\nfunction isOctCode(c) {\n return isCharCodeInRange(c, 0x30, 0x37); // 0-7\n}\nfunction isDecCode(c) {\n return isCharCodeInRange(c, 0x30, 0x39); // 0-9\n}\nfunction resolveYamlInteger(data) {\n const max = data.length;\n let index = 0;\n let hasDigits = false;\n if (!max) return false;\n let ch = data[index];\n // sign\n if (ch === \"-\" || ch === \"+\") {\n ch = data[++index];\n }\n if (ch === \"0\") {\n // 0\n if (index + 1 === max) return true;\n ch = data[++index];\n // base 2, base 8, base 16\n if (ch === \"b\") {\n // base 2\n index++;\n for(; index < max; index++){\n ch = data[index];\n if (ch === \"_\") continue;\n if (ch !== \"0\" && ch !== \"1\") return false;\n hasDigits = true;\n }\n return hasDigits && ch !== \"_\";\n }\n if (ch === \"x\") {\n // base 16\n index++;\n for(; index < max; index++){\n ch = data[index];\n if (ch === \"_\") continue;\n if (!isHexCode(data.charCodeAt(index))) return false;\n hasDigits = true;\n }\n return hasDigits && ch !== \"_\";\n }\n // base 8\n for(; index < max; index++){\n ch = data[index];\n if (ch === \"_\") continue;\n if (!isOctCode(data.charCodeAt(index))) return false;\n hasDigits = true;\n }\n return hasDigits && ch !== \"_\";\n }\n // base 10 (except 0) or base 60\n // value should not start with `_`;\n if (ch === \"_\") return false;\n for(; index < max; index++){\n ch = data[index];\n if (ch === \"_\") continue;\n if (!isDecCode(data.charCodeAt(index))) {\n return false;\n }\n hasDigits = true;\n }\n // Should have digits and should not end with `_`\n if (!hasDigits || ch === \"_\") return false;\n // base60 almost not used, no needs to optimize\n return 
/^(:[0-5]?[0-9])+$/.test(data.slice(index));\n}\nfunction constructYamlInteger(data) {\n let value = data;\n if (value.includes(\"_\")) {\n value = value.replace(/_/g, \"\");\n }\n let sign = 1;\n let ch = value[0];\n if (ch === \"-\" || ch === \"+\") {\n if (ch === \"-\") sign = -1;\n value = value.slice(1);\n ch = value[0];\n }\n if (value === \"0\") return 0;\n if (ch === \"0\") {\n if (value[1] === \"b\") return sign * parseInt(value.slice(2), 2);\n if (value[1] === \"x\") return sign * parseInt(value, 16);\n return sign * parseInt(value, 8);\n }\n return sign * parseInt(value, 10);\n}\nfunction isInteger(object) {\n if (object instanceof Number) object = object.valueOf();\n return typeof object === \"number\" && object % 1 === 0 && !isNegativeZero(object);\n}\nexport const int = {\n tag: \"tag:yaml.org,2002:int\",\n construct: constructYamlInteger,\n defaultStyle: \"decimal\",\n kind: \"scalar\",\n predicate: isInteger,\n represent: {\n // deno-lint-ignore ban-types\n binary (object) {\n const value = object instanceof Number ? object.valueOf() : object;\n return value >= 0 ? `0b${value.toString(2)}` : `-0b${value.toString(2).slice(1)}`;\n },\n // deno-lint-ignore ban-types\n octal (object) {\n const value = object instanceof Number ? object.valueOf() : object;\n return value >= 0 ? `0${value.toString(8)}` : `-0${value.toString(8).slice(1)}`;\n },\n // deno-lint-ignore ban-types\n decimal (object) {\n const value = object instanceof Number ? object.valueOf() : object;\n return value.toString(10);\n },\n // deno-lint-ignore ban-types\n hexadecimal (object) {\n const value = object instanceof Number ? object.valueOf() : object;\n return value >= 0 ? 
`0x${value.toString(16).toUpperCase()}` : `-0x${value.toString(16).toUpperCase().slice(1)}`;\n }\n },\n resolve: resolveYamlInteger\n};\n//# sourceMappingURL=int.js.map","// Ported from js-yaml v3.13.1:\n// https://github.com/nodeca/js-yaml/commit/665aadda42349dcae869f12040d9b10ef18d12da\n// Copyright 2011-2015 by Vitaly Puzrin. All rights reserved. MIT license.\n// Copyright 2018-2026 the Deno authors. MIT license.\nexport const map = {\n tag: \"tag:yaml.org,2002:map\",\n resolve () {\n return true;\n },\n construct (data) {\n return data !== null ? data : {};\n },\n kind: \"mapping\"\n};\n//# sourceMappingURL=map.js.map","// Ported from js-yaml v3.13.1:\n// https://github.com/nodeca/js-yaml/commit/665aadda42349dcae869f12040d9b10ef18d12da\n// Copyright 2011-2015 by Vitaly Puzrin. All rights reserved. MIT license.\n// Copyright 2018-2026 the Deno authors. MIT license.\nexport const merge = {\n tag: \"tag:yaml.org,2002:merge\",\n kind: \"scalar\",\n resolve: (data)=>data === \"<<\" || data === null,\n construct: (data)=>data\n};\n//# sourceMappingURL=merge.js.map","// Ported from js-yaml v3.13.1:\n// https://github.com/nodeca/js-yaml/commit/665aadda42349dcae869f12040d9b10ef18d12da\n// Copyright 2011-2015 by Vitaly Puzrin. All rights reserved. MIT license.\n// Copyright 2018-2026 the Deno authors. MIT license.\nexport const nil = {\n tag: \"tag:yaml.org,2002:null\",\n kind: \"scalar\",\n defaultStyle: \"lowercase\",\n predicate: (object)=>object === null,\n construct: ()=>null,\n resolve: (data)=>{\n return data === \"~\" || data === \"null\" || data === \"Null\" || data === \"NULL\";\n },\n represent: {\n lowercase: ()=>\"null\",\n uppercase: ()=>\"NULL\",\n camelcase: ()=>\"Null\"\n }\n};\n//# sourceMappingURL=nil.js.map","// Ported from js-yaml v3.13.1:\n// https://github.com/nodeca/js-yaml/commit/665aadda42349dcae869f12040d9b10ef18d12da\n// Copyright 2011-2015 by Vitaly Puzrin. All rights reserved. MIT license.\n// Copyright 2018-2026 the Deno authors. 
MIT license.\nimport { isPlainObject } from \"../_utils.js\";\nfunction resolveYamlOmap(data) {\n const objectKeys = new Set();\n for (const object of data){\n if (!isPlainObject(object)) return false;\n const keys = Object.keys(object);\n if (keys.length !== 1) return false;\n for (const key of keys){\n if (objectKeys.has(key)) return false;\n objectKeys.add(key);\n }\n }\n return true;\n}\nexport const omap = {\n tag: \"tag:yaml.org,2002:omap\",\n kind: \"sequence\",\n resolve: resolveYamlOmap,\n construct (data) {\n return data;\n }\n};\n//# sourceMappingURL=omap.js.map","// Ported from js-yaml v3.13.1:\n// https://github.com/nodeca/js-yaml/commit/665aadda42349dcae869f12040d9b10ef18d12da\n// Copyright 2011-2015 by Vitaly Puzrin. All rights reserved. MIT license.\n// Copyright 2018-2026 the Deno authors. MIT license.\nimport { isPlainObject } from \"../_utils.js\";\nfunction resolveYamlPairs(data) {\n if (data === null) return true;\n return data.every((it)=>isPlainObject(it) && Object.keys(it).length === 1);\n}\nexport const pairs = {\n tag: \"tag:yaml.org,2002:pairs\",\n construct (data) {\n // Converts an array of objects into an array of key-value pairs.\n return data?.flatMap(Object.entries) ?? [];\n },\n kind: \"sequence\",\n resolve: resolveYamlPairs\n};\n//# sourceMappingURL=pairs.js.map","// Ported and adapted from js-yaml-js-types v1.0.0:\n// https://github.com/nodeca/js-yaml-js-types/tree/ac537e7bbdd3c2cbbd9882ca3919c520c2dc022b\n// Copyright 2011-2015 by Vitaly Puzrin. All rights reserved. MIT license.\n// Copyright 2018-2026 the Deno authors. 
MIT license.\nconst REGEXP = /^\\/(?<regexp>[\\s\\S]+)\\/(?<modifiers>[gismuy]*)$/;\nexport const regexp = {\n tag: \"tag:yaml.org,2002:js/regexp\",\n kind: \"scalar\",\n resolve (data) {\n if (data === null || !data.length) return false;\n if (data.charAt(0) === \"/\") {\n // Ensure regex is properly terminated\n const groups = data.match(REGEXP)?.groups;\n if (!groups) return false;\n // Check no duplicate modifiers\n const modifiers = groups.modifiers ?? \"\";\n if (new Set(modifiers).size < modifiers.length) return false;\n }\n return true;\n },\n construct (data) {\n const { regexp = data, modifiers = \"\" } = data.match(REGEXP)?.groups ?? {};\n return new RegExp(regexp, modifiers);\n },\n predicate: (object)=>object instanceof RegExp,\n represent: (object)=>object.toString()\n};\n//# sourceMappingURL=regexp.js.map","// Ported from js-yaml v3.13.1:\n// https://github.com/nodeca/js-yaml/commit/665aadda42349dcae869f12040d9b10ef18d12da\n// Copyright 2011-2015 by Vitaly Puzrin. All rights reserved. MIT license.\n// Copyright 2018-2026 the Deno authors. MIT license.\nexport const seq = {\n tag: \"tag:yaml.org,2002:seq\",\n kind: \"sequence\",\n resolve: ()=>true,\n construct: (data)=>data !== null ? data : []\n};\n//# sourceMappingURL=seq.js.map","// Ported from js-yaml v3.13.1:\n// https://github.com/nodeca/js-yaml/commit/665aadda42349dcae869f12040d9b10ef18d12da\n// Copyright 2011-2015 by Vitaly Puzrin. All rights reserved. MIT license.\n// Copyright 2018-2026 the Deno authors. MIT license.\nexport const set = {\n tag: \"tag:yaml.org,2002:set\",\n kind: \"mapping\",\n construct: (data)=>data !== null ? data : {},\n resolve: (data)=>{\n if (data === null) return true;\n return Object.values(data).every((it)=>it === null);\n }\n};\n//# sourceMappingURL=set.js.map","// Ported from js-yaml v3.13.1:\n// https://github.com/nodeca/js-yaml/commit/665aadda42349dcae869f12040d9b10ef18d12da\n// Copyright 2018-2026 the Deno authors. 
MIT license.\nexport const str = {\n tag: \"tag:yaml.org,2002:str\",\n kind: \"scalar\",\n resolve: ()=>true,\n construct: (data)=>data !== null ? data : \"\"\n};\n//# sourceMappingURL=str.js.map","// Ported from js-yaml v3.13.1:\n// https://github.com/nodeca/js-yaml/commit/665aadda42349dcae869f12040d9b10ef18d12da\n// Copyright 2011-2015 by Vitaly Puzrin. All rights reserved. MIT license.\n// Copyright 2018-2026 the Deno authors. MIT license.\nconst YAML_DATE_REGEXP = new RegExp(\"^([0-9][0-9][0-9][0-9])\" + // [1] year\n\"-([0-9][0-9])\" + // [2] month\n\"-([0-9][0-9])$\");\nconst YAML_TIMESTAMP_REGEXP = new RegExp(\"^([0-9][0-9][0-9][0-9])\" + // [1] year\n\"-([0-9][0-9]?)\" + // [2] month\n\"-([0-9][0-9]?)\" + // [3] day\n\"(?:[Tt]|[ \\\\t]+)\" + // ...\n\"([0-9][0-9]?)\" + // [4] hour\n\":([0-9][0-9])\" + // [5] minute\n\":([0-9][0-9])\" + // [6] second\n\"(?:\\\\.([0-9]*))?\" + // [7] fraction\n\"(?:[ \\\\t]*(Z|([-+])([0-9][0-9]?)\" + // [8] tz [9] tz_sign [10] tz_hour\n\"(?::([0-9][0-9]))?))?$\");\nfunction resolveYamlTimestamp(data) {\n if (data === null) return false;\n if (YAML_DATE_REGEXP.exec(data) !== null) return true;\n if (YAML_TIMESTAMP_REGEXP.exec(data) !== null) return true;\n return false;\n}\nfunction constructYamlTimestamp(data) {\n let match = YAML_DATE_REGEXP.exec(data);\n if (match === null) match = YAML_TIMESTAMP_REGEXP.exec(data);\n if (match === null) {\n throw new Error(\"Cannot construct YAML timestamp: date resolve error\");\n }\n // match: [1] year [2] month [3] day\n const year = +match[1];\n const month = +match[2] - 1; // JS month starts with 0\n const day = +match[3];\n if (!match[4]) {\n // no hour\n return new Date(Date.UTC(year, month, day));\n }\n // match: [4] hour [5] minute [6] second [7] fraction\n const hour = +match[4];\n const minute = +match[5];\n const second = +match[6];\n let fraction = 0;\n if (match[7]) {\n let partFraction = match[7].slice(0, 3);\n while(partFraction.length < 3){\n // milli-seconds\n partFraction 
+= \"0\";\n }\n fraction = +partFraction;\n }\n // match: [8] tz [9] tz_sign [10] tz_hour [11] tz_minute\n let delta = null;\n if (match[9] && match[10]) {\n const tzHour = +match[10];\n const tzMinute = +(match[11] || 0);\n delta = (tzHour * 60 + tzMinute) * 60000; // delta in milli-seconds\n if (match[9] === \"-\") delta = -delta;\n }\n const date = new Date(Date.UTC(year, month, day, hour, minute, second, fraction));\n if (delta) date.setTime(date.getTime() - delta);\n return date;\n}\nfunction representYamlTimestamp(date) {\n return date.toISOString();\n}\nexport const timestamp = {\n tag: \"tag:yaml.org,2002:timestamp\",\n construct: constructYamlTimestamp,\n predicate (object) {\n return object instanceof Date;\n },\n kind: \"scalar\",\n represent: representYamlTimestamp,\n resolve: resolveYamlTimestamp\n};\n//# sourceMappingURL=timestamp.js.map","// Ported and adapted from js-yaml-js-types v1.0.0:\n// https://github.com/nodeca/js-yaml-js-types/tree/ac537e7bbdd3c2cbbd9882ca3919c520c2dc022b\n// Copyright 2011-2015 by Vitaly Puzrin. All rights reserved. MIT license.\n// Copyright 2018-2026 the Deno authors. MIT license.\nexport const undefinedType = {\n tag: \"tag:yaml.org,2002:js/undefined\",\n kind: \"scalar\",\n resolve () {\n return true;\n },\n construct () {\n return undefined;\n },\n predicate (object) {\n return typeof object === \"undefined\";\n },\n represent () {\n return \"\";\n }\n};\n//# sourceMappingURL=undefined.js.map","// Ported from js-yaml v3.13.1:\n// https://github.com/nodeca/js-yaml/commit/665aadda42349dcae869f12040d9b10ef18d12da\n// Copyright 2011-2015 by Vitaly Puzrin. All rights reserved. MIT license.\n// Copyright 2018-2026 the Deno authors. 
MIT license.\n// This module is browser compatible.\nimport { binary } from \"./_type/binary.js\";\nimport { bool } from \"./_type/bool.js\";\nimport { float } from \"./_type/float.js\";\nimport { int } from \"./_type/int.js\";\nimport { map } from \"./_type/map.js\";\nimport { merge } from \"./_type/merge.js\";\nimport { nil } from \"./_type/nil.js\";\nimport { omap } from \"./_type/omap.js\";\nimport { pairs } from \"./_type/pairs.js\";\nimport { regexp } from \"./_type/regexp.js\";\nimport { seq } from \"./_type/seq.js\";\nimport { set } from \"./_type/set.js\";\nimport { str } from \"./_type/str.js\";\nimport { timestamp } from \"./_type/timestamp.js\";\nimport { undefinedType } from \"./_type/undefined.js\";\nfunction createTypeMap(implicitTypes, explicitTypes) {\n const result = {\n fallback: new Map(),\n mapping: new Map(),\n scalar: new Map(),\n sequence: new Map()\n };\n const fallbackMap = result.fallback;\n for (const type of [\n ...implicitTypes,\n ...explicitTypes\n ]){\n const map = result[type.kind];\n map.set(type.tag, type);\n fallbackMap.set(type.tag, type);\n }\n return result;\n}\nfunction createSchema({ explicitTypes = [], implicitTypes = [], include }) {\n if (include) {\n implicitTypes.push(...include.implicitTypes);\n explicitTypes.push(...include.explicitTypes);\n }\n const typeMap = createTypeMap(implicitTypes, explicitTypes);\n return {\n implicitTypes,\n explicitTypes,\n typeMap\n };\n}\n/**\n * Standard YAML's failsafe schema.\n *\n * @see {@link http://www.yaml.org/spec/1.2/spec.html#id2802346}\n */ const FAILSAFE_SCHEMA = createSchema({\n explicitTypes: [\n str,\n seq,\n map\n ]\n});\n/**\n * Standard YAML's JSON schema.\n *\n * @see {@link http://www.yaml.org/spec/1.2/spec.html#id2803231}\n */ const JSON_SCHEMA = createSchema({\n implicitTypes: [\n nil,\n bool,\n int,\n float\n ],\n include: FAILSAFE_SCHEMA\n});\n/**\n * Standard YAML's core schema.\n *\n * @see {@link http://www.yaml.org/spec/1.2/spec.html#id2804923}\n */ const 
CORE_SCHEMA = createSchema({\n include: JSON_SCHEMA\n});\n/**\n * Default YAML schema. It is not described in the YAML specification.\n */ export const DEFAULT_SCHEMA = createSchema({\n explicitTypes: [\n binary,\n omap,\n pairs,\n set\n ],\n implicitTypes: [\n timestamp,\n merge\n ],\n include: CORE_SCHEMA\n});\n/***\n * Extends JS-YAML default schema with additional JavaScript types\n * It is not described in the YAML specification.\n * Functions are no longer supported for security reasons.\n *\n * @example\n * ```ts\n * import { parse } from \"@std/yaml\";\n *\n * const data = parse(\n * `\n * regexp:\n * simple: !!js/regexp foobar\n * modifiers: !!js/regexp /foobar/mi\n * undefined: !!js/undefined ~\n * `,\n * { schema: \"extended\" },\n * );\n * ```\n */ const EXTENDED_SCHEMA = createSchema({\n explicitTypes: [\n regexp,\n undefinedType\n ],\n include: DEFAULT_SCHEMA\n});\nexport const SCHEMA_MAP = new Map([\n [\n \"core\",\n CORE_SCHEMA\n ],\n [\n \"default\",\n DEFAULT_SCHEMA\n ],\n [\n \"failsafe\",\n FAILSAFE_SCHEMA\n ],\n [\n \"json\",\n JSON_SCHEMA\n ],\n [\n \"extended\",\n EXTENDED_SCHEMA\n ]\n]);\nexport function getSchema(schema = \"default\", types) {\n const schemaObj = SCHEMA_MAP.get(schema);\n if (!types) {\n return schemaObj;\n }\n return createSchema({\n implicitTypes: [\n ...types,\n ...schemaObj.implicitTypes\n ],\n explicitTypes: [\n ...schemaObj.explicitTypes\n ]\n });\n}\n//# sourceMappingURL=_schema.js.map","// Ported from js-yaml v3.13.1:\n// https://github.com/nodeca/js-yaml/commit/665aadda42349dcae869f12040d9b10ef18d12da\n// Copyright 2011-2015 by Vitaly Puzrin. All rights reserved. MIT license.\n// Copyright 2018-2026 the Deno authors. 
MIT license.\nimport { AMPERSAND, ASTERISK, BACKSLASH, CARRIAGE_RETURN, COLON, COMMA, COMMERCIAL_AT, DOT, DOUBLE_QUOTE, EXCLAMATION, GRAVE_ACCENT, GREATER_THAN, isEOL, isFlowIndicator, isWhiteSpace, isWhiteSpaceOrEOL, LEFT_CURLY_BRACKET, LEFT_SQUARE_BRACKET, LINE_FEED, MINUS, PERCENT, PLUS, QUESTION, RIGHT_CURLY_BRACKET, RIGHT_SQUARE_BRACKET, SHARP, SINGLE_QUOTE, SMALLER_THAN, SPACE, VERTICAL_LINE } from \"./_chars.js\";\nimport { DEFAULT_SCHEMA } from \"./_schema.js\";\nimport { isObject, isPlainObject } from \"./_utils.js\";\nconst CONTEXT_FLOW_IN = 1;\nconst CONTEXT_FLOW_OUT = 2;\nconst CONTEXT_BLOCK_IN = 3;\nconst CONTEXT_BLOCK_OUT = 4;\nconst CHOMPING_CLIP = 1;\nconst CHOMPING_STRIP = 2;\nconst CHOMPING_KEEP = 3;\nconst PATTERN_NON_PRINTABLE = // deno-lint-ignore no-control-regex\n/[\\x00-\\x08\\x0B\\x0C\\x0E-\\x1F\\x7F-\\x84\\x86-\\x9F\\uFFFE\\uFFFF]|[\\uD800-\\uDBFF](?![\\uDC00-\\uDFFF])|(?:[^\\uD800-\\uDBFF]|^)[\\uDC00-\\uDFFF]/;\nconst PATTERN_NON_ASCII_LINE_BREAKS = /[\\x85\\u2028\\u2029]/;\nconst PATTERN_FLOW_INDICATORS = /[,\\[\\]\\{\\}]/;\nconst PATTERN_TAG_HANDLE = /^(?:!|!!|![a-z\\-]+!)$/i;\nconst PATTERN_TAG_URI = /^(?:!|[^,\\[\\]\\{\\}])(?:%[0-9a-f]{2}|[0-9a-z\\-#;\\/\\?:@&=\\+\\$,_\\.!~\\*'\\(\\)\\[\\]])*$/i;\nconst ESCAPED_HEX_LENGTHS = new Map([\n [\n 0x78,\n 2\n ],\n [\n 0x75,\n 4\n ],\n [\n 0x55,\n 8\n ]\n]);\nconst SIMPLE_ESCAPE_SEQUENCES = new Map([\n [\n 0x30,\n \"\\x00\"\n ],\n [\n 0x61,\n \"\\x07\"\n ],\n [\n 0x62,\n \"\\x08\"\n ],\n [\n 0x74,\n \"\\x09\"\n ],\n [\n 0x09,\n \"\\x09\"\n ],\n [\n 0x6e,\n \"\\x0A\"\n ],\n [\n 0x76,\n \"\\x0B\"\n ],\n [\n 0x66,\n \"\\x0C\"\n ],\n [\n 0x72,\n \"\\x0D\"\n ],\n [\n 0x65,\n \"\\x1B\"\n ],\n [\n 0x20,\n \" \"\n ],\n [\n 0x22,\n '\"'\n ],\n [\n 0x2f,\n \"/\"\n ],\n [\n 0x5c,\n \"\\\\\"\n ],\n [\n 0x4e,\n \"\\x85\"\n ],\n [\n 0x5f,\n \"\\xA0\"\n ],\n [\n 0x4c,\n \"\\u2028\"\n ],\n [\n 0x50,\n \"\\u2029\"\n ]\n]);\n/**\n * Converts a hexadecimal character code to its decimal value.\n */ function 
hexCharCodeToNumber(charCode) {\n // Check if the character code is in the range for '0' to '9'\n if (0x30 <= charCode && charCode <= 0x39) return charCode - 0x30; // Convert '0'-'9' to 0-9\n // Normalize the character code to lowercase if it's a letter\n const lc = charCode | 0x20;\n // Check if the character code is in the range for 'a' to 'f'\n if (0x61 <= lc && lc <= 0x66) return lc - 0x61 + 10; // Convert 'a'-'f' to 10-15\n return -1;\n}\n/**\n * Converts a decimal character code to its decimal value.\n */ function decimalCharCodeToNumber(charCode) {\n // Check if the character code is in the range for '0' to '9'\n if (0x30 <= charCode && charCode <= 0x39) return charCode - 0x30; // Convert '0'-'9' to 0-9\n return -1;\n}\n/**\n * Converts a Unicode code point to a string.\n */ function codepointToChar(codepoint) {\n // Check if the code point is within the Basic Multilingual Plane (BMP)\n if (codepoint <= 0xffff) return String.fromCharCode(codepoint); // Convert BMP code point to character\n // Encode UTF-16 surrogate pair for code points beyond BMP\n // Reference: https://en.wikipedia.org/wiki/UTF-16#Code_points_U.2B010000_to_U.2B10FFFF\n return String.fromCharCode((codepoint - 0x010000 >> 10) + 0xd800, (codepoint - 0x010000 & 0x03ff) + 0xdc00);\n}\nconst INDENT = 4;\nconst MAX_LENGTH = 75;\nconst DELIMITERS = \"\\x00\\r\\n\\x85\\u2028\\u2029\";\nfunction getSnippet(buffer, position) {\n if (!buffer) return null;\n let start = position;\n let end = position;\n let head = \"\";\n let tail = \"\";\n while(start > 0 && !DELIMITERS.includes(buffer.charAt(start - 1))){\n start--;\n if (position - start > MAX_LENGTH / 2 - 1) {\n head = \" ... \";\n start += 5;\n break;\n }\n }\n while(end < buffer.length && !DELIMITERS.includes(buffer.charAt(end))){\n end++;\n if (end - position > MAX_LENGTH / 2 - 1) {\n tail = \" ... 
\";\n end -= 5;\n break;\n }\n }\n const snippet = buffer.slice(start, end);\n const indent = \" \".repeat(INDENT);\n const caretIndent = \" \".repeat(INDENT + position - start + head.length);\n return `${indent + head + snippet + tail}\\n${caretIndent}^`;\n}\nfunction markToString(buffer, position, line, column) {\n let where = `at line ${line + 1}, column ${column + 1}`;\n const snippet = getSnippet(buffer, position);\n if (snippet) where += `:\\n${snippet}`;\n return where;\n}\nfunction getIndentStatus(lineIndent, parentIndent) {\n if (lineIndent > parentIndent) return 1;\n if (lineIndent < parentIndent) return -1;\n return 0;\n}\nfunction writeFoldedLines(count) {\n if (count === 1) return \" \";\n if (count > 1) return \"\\n\".repeat(count - 1);\n return \"\";\n}\nclass Scanner {\n source;\n #length;\n position = 0;\n constructor(source){\n // Use 0 as string terminator. That significantly simplifies bounds check.\n source += \"\\0\";\n this.source = source;\n this.#length = source.length;\n }\n peek(offset = 0) {\n return this.source.charCodeAt(this.position + offset);\n }\n next() {\n this.position += 1;\n }\n eof() {\n return this.position >= this.#length - 1;\n }\n}\nexport class LoaderState {\n #scanner;\n lineIndent = 0;\n lineStart = 0;\n line = 0;\n onWarning;\n allowDuplicateKeys;\n implicitTypes;\n typeMap;\n checkLineBreaks = false;\n tagMap = new Map();\n anchorMap = new Map();\n constructor(input, { schema = DEFAULT_SCHEMA, onWarning, allowDuplicateKeys = false }){\n this.#scanner = new Scanner(input);\n this.onWarning = onWarning;\n this.allowDuplicateKeys = allowDuplicateKeys;\n this.implicitTypes = schema.implicitTypes;\n this.typeMap = schema.typeMap;\n this.readIndent();\n }\n skipWhitespaces() {\n let ch = this.#scanner.peek();\n while(isWhiteSpace(ch)){\n this.#scanner.next();\n ch = this.#scanner.peek();\n }\n }\n skipComment() {\n let ch = this.#scanner.peek();\n if (ch !== SHARP) return;\n this.#scanner.next();\n ch = 
this.#scanner.peek();\n while(ch !== 0 && !isEOL(ch)){\n this.#scanner.next();\n ch = this.#scanner.peek();\n }\n }\n readIndent() {\n let ch = this.#scanner.peek();\n while(ch === SPACE){\n this.lineIndent += 1;\n this.#scanner.next();\n ch = this.#scanner.peek();\n }\n }\n #createError(message) {\n const mark = markToString(this.#scanner.source, this.#scanner.position, this.line, this.#scanner.position - this.lineStart);\n return new SyntaxError(`${message} ${mark}`);\n }\n dispatchWarning(message) {\n const error = this.#createError(message);\n this.onWarning?.(error);\n }\n yamlDirectiveHandler(args) {\n if (args.length !== 1) {\n throw this.#createError(\"Cannot handle YAML directive: YAML directive accepts exactly one argument\");\n }\n const match = /^([0-9]+)\\.([0-9]+)$/.exec(args[0]);\n if (match === null) {\n throw this.#createError(\"Cannot handle YAML directive: ill-formed argument\");\n }\n const major = parseInt(match[1], 10);\n const minor = parseInt(match[2], 10);\n if (major !== 1) {\n throw this.#createError(\"Cannot handle YAML directive: unacceptable YAML version\");\n }\n this.checkLineBreaks = minor < 2;\n if (minor !== 1 && minor !== 2) {\n this.dispatchWarning(\"Cannot handle YAML directive: unsupported YAML version\");\n }\n return args[0] ?? 
null;\n }\n tagDirectiveHandler(args) {\n if (args.length !== 2) {\n throw this.#createError(`Cannot handle tag directive: directive accepts exactly two arguments, received ${args.length}`);\n }\n const handle = args[0];\n const prefix = args[1];\n if (!PATTERN_TAG_HANDLE.test(handle)) {\n throw this.#createError(`Cannot handle tag directive: ill-formed handle (first argument) in \"${handle}\"`);\n }\n if (this.tagMap.has(handle)) {\n throw this.#createError(`Cannot handle tag directive: previously declared suffix for \"${handle}\" tag handle`);\n }\n if (!PATTERN_TAG_URI.test(prefix)) {\n throw this.#createError(\"Cannot handle tag directive: ill-formed tag prefix (second argument) of the TAG directive\");\n }\n this.tagMap.set(handle, prefix);\n }\n captureSegment(start, end, checkJson) {\n if (start < end) {\n const result = this.#scanner.source.slice(start, end);\n if (checkJson) {\n for(let position = 0; position < result.length; position++){\n const character = result.charCodeAt(position);\n if (!(character === 0x09 || 0x20 <= character && character <= 0x10ffff)) {\n throw this.#createError(`Expected valid JSON character: received \"${character}\"`);\n }\n }\n } else if (PATTERN_NON_PRINTABLE.test(result)) {\n throw this.#createError(\"Stream contains non-printable characters\");\n }\n return result;\n }\n }\n readBlockSequence(tag, anchor, nodeIndent) {\n let detected = false;\n const result = [];\n if (anchor !== null) this.anchorMap.set(anchor, result);\n let ch = this.#scanner.peek();\n while(ch !== 0){\n if (ch !== MINUS) {\n break;\n }\n const following = this.#scanner.peek(1);\n if (!isWhiteSpaceOrEOL(following)) {\n break;\n }\n detected = true;\n this.#scanner.next();\n if (this.skipSeparationSpace(true, -1)) {\n if (this.lineIndent <= nodeIndent) {\n result.push(null);\n ch = this.#scanner.peek();\n continue;\n }\n }\n const line = this.line;\n const newState = this.composeNode({\n parentIndent: nodeIndent,\n nodeContext: CONTEXT_BLOCK_IN,\n 
allowToSeek: false,\n allowCompact: true\n });\n if (newState) result.push(newState.result);\n this.skipSeparationSpace(true, -1);\n ch = this.#scanner.peek();\n if ((this.line === line || this.lineIndent > nodeIndent) && ch !== 0) {\n throw this.#createError(\"Cannot read block sequence: bad indentation of a sequence entry\");\n } else if (this.lineIndent < nodeIndent) {\n break;\n }\n }\n if (detected) return {\n tag,\n anchor,\n kind: \"sequence\",\n result\n };\n }\n mergeMappings(destination, source, overridableKeys) {\n if (!isObject(source)) {\n throw this.#createError(\"Cannot merge mappings: the provided source object is unacceptable\");\n }\n for (const [key, value] of Object.entries(source)){\n if (Object.hasOwn(destination, key)) continue;\n Object.defineProperty(destination, key, {\n value,\n writable: true,\n enumerable: true,\n configurable: true\n });\n overridableKeys.add(key);\n }\n }\n storeMappingPair(result, overridableKeys, keyTag, keyNode, valueNode, startLine, startPos) {\n // The output is a plain object here, so keys can only be strings.\n // We need to convert keyNode to a string, but doing so can hang the process\n // (deeply nested arrays that explode exponentially using aliases).\n if (Array.isArray(keyNode)) {\n keyNode = Array.prototype.slice.call(keyNode);\n for(let index = 0; index < keyNode.length; index++){\n if (Array.isArray(keyNode[index])) {\n throw this.#createError(\"Cannot store mapping pair: nested arrays are not supported inside keys\");\n }\n if (typeof keyNode === \"object\" && isPlainObject(keyNode[index])) {\n keyNode[index] = \"[object Object]\";\n }\n }\n }\n // Avoid code execution in load() via toString property\n // (still use its own toString for arrays, timestamps,\n // and whatever user schema extensions happen to have @@toStringTag)\n if (typeof keyNode === \"object\" && isPlainObject(keyNode)) {\n keyNode = \"[object Object]\";\n }\n keyNode = String(keyNode);\n if (keyTag === \"tag:yaml.org,2002:merge\") 
{\n if (Array.isArray(valueNode)) {\n for(let index = 0; index < valueNode.length; index++){\n this.mergeMappings(result, valueNode[index], overridableKeys);\n }\n } else {\n this.mergeMappings(result, valueNode, overridableKeys);\n }\n } else {\n if (!this.allowDuplicateKeys && !overridableKeys.has(keyNode) && Object.hasOwn(result, keyNode)) {\n this.line = startLine || this.line;\n this.#scanner.position = startPos || this.#scanner.position;\n throw this.#createError(\"Cannot store mapping pair: duplicated key\");\n }\n Object.defineProperty(result, keyNode, {\n value: valueNode,\n writable: true,\n enumerable: true,\n configurable: true\n });\n overridableKeys.delete(keyNode);\n }\n return result;\n }\n readLineBreak() {\n const ch = this.#scanner.peek();\n if (ch === LINE_FEED) {\n this.#scanner.next();\n } else if (ch === CARRIAGE_RETURN) {\n this.#scanner.next();\n if (this.#scanner.peek() === LINE_FEED) {\n this.#scanner.next();\n }\n } else {\n throw this.#createError(\"Cannot read line: line break not found\");\n }\n this.line += 1;\n this.lineStart = this.#scanner.position;\n }\n skipSeparationSpace(allowComments, checkIndent) {\n let lineBreaks = 0;\n let ch = this.#scanner.peek();\n while(ch !== 0){\n this.skipWhitespaces();\n ch = this.#scanner.peek();\n if (allowComments) {\n this.skipComment();\n ch = this.#scanner.peek();\n }\n if (isEOL(ch)) {\n this.readLineBreak();\n ch = this.#scanner.peek();\n lineBreaks++;\n this.lineIndent = 0;\n this.readIndent();\n ch = this.#scanner.peek();\n } else {\n break;\n }\n }\n if (checkIndent !== -1 && lineBreaks !== 0 && this.lineIndent < checkIndent) {\n this.dispatchWarning(\"deficient indentation\");\n }\n return lineBreaks;\n }\n testDocumentSeparator() {\n let ch = this.#scanner.peek();\n // Condition this.#scanner.position === this.lineStart is tested\n // in parent on each call, for efficiency. 
No needs to test here again.\n if ((ch === MINUS || ch === DOT) && ch === this.#scanner.peek(1) && ch === this.#scanner.peek(2)) {\n ch = this.#scanner.peek(3);\n if (ch === 0 || isWhiteSpaceOrEOL(ch)) {\n return true;\n }\n }\n return false;\n }\n readPlainScalar(tag, anchor, nodeIndent, withinFlowCollection) {\n let ch = this.#scanner.peek();\n if (isWhiteSpaceOrEOL(ch) || isFlowIndicator(ch) || ch === SHARP || ch === AMPERSAND || ch === ASTERISK || ch === EXCLAMATION || ch === VERTICAL_LINE || ch === GREATER_THAN || ch === SINGLE_QUOTE || ch === DOUBLE_QUOTE || ch === PERCENT || ch === COMMERCIAL_AT || ch === GRAVE_ACCENT) {\n return;\n }\n let following;\n if (ch === QUESTION || ch === MINUS) {\n following = this.#scanner.peek(1);\n if (isWhiteSpaceOrEOL(following) || withinFlowCollection && isFlowIndicator(following)) {\n return;\n }\n }\n let result = \"\";\n let captureEnd = this.#scanner.position;\n let captureStart = this.#scanner.position;\n let hasPendingContent = false;\n let line = 0;\n while(ch !== 0){\n if (ch === COLON) {\n following = this.#scanner.peek(1);\n if (isWhiteSpaceOrEOL(following) || withinFlowCollection && isFlowIndicator(following)) {\n break;\n }\n } else if (ch === SHARP) {\n const preceding = this.#scanner.peek(-1);\n if (isWhiteSpaceOrEOL(preceding)) {\n break;\n }\n } else if (this.#scanner.position === this.lineStart && this.testDocumentSeparator() || withinFlowCollection && isFlowIndicator(ch)) {\n break;\n } else if (isEOL(ch)) {\n line = this.line;\n const lineStart = this.lineStart;\n const lineIndent = this.lineIndent;\n this.skipSeparationSpace(false, -1);\n if (this.lineIndent >= nodeIndent) {\n hasPendingContent = true;\n ch = this.#scanner.peek();\n continue;\n } else {\n this.#scanner.position = captureEnd;\n this.line = line;\n this.lineStart = lineStart;\n this.lineIndent = lineIndent;\n break;\n }\n }\n if (hasPendingContent) {\n const segment = this.captureSegment(captureStart, captureEnd, false);\n if (segment) 
result += segment;\n result += writeFoldedLines(this.line - line);\n captureStart = captureEnd = this.#scanner.position;\n hasPendingContent = false;\n }\n if (!isWhiteSpace(ch)) {\n captureEnd = this.#scanner.position + 1;\n }\n this.#scanner.next();\n ch = this.#scanner.peek();\n }\n const segment = this.captureSegment(captureStart, captureEnd, false);\n if (segment) result += segment;\n if (anchor !== null) this.anchorMap.set(anchor, result);\n if (result) return {\n tag,\n anchor,\n kind: \"scalar\",\n result\n };\n }\n readSingleQuotedScalar(tag, anchor, nodeIndent) {\n let ch = this.#scanner.peek();\n if (ch !== SINGLE_QUOTE) return;\n let result = \"\";\n this.#scanner.next();\n let captureStart = this.#scanner.position;\n let captureEnd = this.#scanner.position;\n ch = this.#scanner.peek();\n while(ch !== 0){\n if (ch === SINGLE_QUOTE) {\n const segment = this.captureSegment(captureStart, this.#scanner.position, true);\n if (segment) result += segment;\n this.#scanner.next();\n ch = this.#scanner.peek();\n if (ch === SINGLE_QUOTE) {\n captureStart = this.#scanner.position;\n this.#scanner.next();\n captureEnd = this.#scanner.position;\n } else {\n if (anchor !== null) this.anchorMap.set(anchor, result);\n return {\n tag,\n anchor,\n kind: \"scalar\",\n result\n };\n }\n } else if (isEOL(ch)) {\n const segment = this.captureSegment(captureStart, captureEnd, true);\n if (segment) result += segment;\n result += writeFoldedLines(this.skipSeparationSpace(false, nodeIndent));\n captureStart = captureEnd = this.#scanner.position;\n } else if (this.#scanner.position === this.lineStart && this.testDocumentSeparator()) {\n throw this.#createError(\"Unexpected end of the document within a single quoted scalar\");\n } else {\n this.#scanner.next();\n captureEnd = this.#scanner.position;\n }\n ch = this.#scanner.peek();\n }\n throw this.#createError(\"Unexpected end of the stream within a single quoted scalar\");\n }\n readDoubleQuotedScalar(tag, anchor, nodeIndent) {\n 
let ch = this.#scanner.peek();\n if (ch !== DOUBLE_QUOTE) return;\n let result = \"\";\n this.#scanner.next();\n let captureEnd = this.#scanner.position;\n let captureStart = this.#scanner.position;\n let tmp;\n ch = this.#scanner.peek();\n while(ch !== 0){\n if (ch === DOUBLE_QUOTE) {\n const segment = this.captureSegment(captureStart, this.#scanner.position, true);\n if (segment) result += segment;\n this.#scanner.next();\n if (anchor !== null) this.anchorMap.set(anchor, result);\n return {\n tag,\n anchor,\n kind: \"scalar\",\n result\n };\n }\n if (ch === BACKSLASH) {\n const segment = this.captureSegment(captureStart, this.#scanner.position, true);\n if (segment) result += segment;\n this.#scanner.next();\n ch = this.#scanner.peek();\n if (isEOL(ch)) {\n this.skipSeparationSpace(false, nodeIndent);\n } else if (ch < 256 && SIMPLE_ESCAPE_SEQUENCES.has(ch)) {\n result += SIMPLE_ESCAPE_SEQUENCES.get(ch);\n this.#scanner.next();\n } else if ((tmp = ESCAPED_HEX_LENGTHS.get(ch) ?? 0) > 0) {\n let hexLength = tmp;\n let hexResult = 0;\n for(; hexLength > 0; hexLength--){\n this.#scanner.next();\n ch = this.#scanner.peek();\n if ((tmp = hexCharCodeToNumber(ch)) >= 0) {\n hexResult = (hexResult << 4) + tmp;\n } else {\n throw this.#createError(\"Cannot read double quoted scalar: expected hexadecimal character\");\n }\n }\n result += codepointToChar(hexResult);\n this.#scanner.next();\n } else {\n throw this.#createError(\"Cannot read double quoted scalar: unknown escape sequence\");\n }\n captureStart = captureEnd = this.#scanner.position;\n } else if (isEOL(ch)) {\n const segment = this.captureSegment(captureStart, captureEnd, true);\n if (segment) result += segment;\n result += writeFoldedLines(this.skipSeparationSpace(false, nodeIndent));\n captureStart = captureEnd = this.#scanner.position;\n } else if (this.#scanner.position === this.lineStart && this.testDocumentSeparator()) {\n throw this.#createError(\"Unexpected end of the document within a double quoted 
scalar\");\n } else {\n this.#scanner.next();\n captureEnd = this.#scanner.position;\n }\n ch = this.#scanner.peek();\n }\n throw this.#createError(\"Unexpected end of the stream within a double quoted scalar\");\n }\n readFlowCollection(tag, anchor, nodeIndent) {\n let ch = this.#scanner.peek();\n let terminator;\n let isMapping = true;\n let result = {};\n if (ch === LEFT_SQUARE_BRACKET) {\n terminator = RIGHT_SQUARE_BRACKET;\n isMapping = false;\n result = [];\n } else if (ch === LEFT_CURLY_BRACKET) {\n terminator = RIGHT_CURLY_BRACKET;\n } else {\n return;\n }\n if (anchor !== null) this.anchorMap.set(anchor, result);\n this.#scanner.next();\n ch = this.#scanner.peek();\n let readNext = true;\n let valueNode = null;\n let keyNode = null;\n let keyTag = null;\n let isExplicitPair = false;\n let isPair = false;\n let following = 0;\n let line = 0;\n const overridableKeys = new Set();\n while(ch !== 0){\n this.skipSeparationSpace(true, nodeIndent);\n ch = this.#scanner.peek();\n if (ch === terminator) {\n this.#scanner.next();\n const kind = isMapping ? 
\"mapping\" : \"sequence\";\n return {\n tag,\n anchor,\n kind,\n result\n };\n }\n if (!readNext) {\n throw this.#createError(\"Cannot read flow collection: missing comma between flow collection entries\");\n }\n keyTag = keyNode = valueNode = null;\n isPair = isExplicitPair = false;\n if (ch === QUESTION) {\n following = this.#scanner.peek(1);\n if (isWhiteSpaceOrEOL(following)) {\n isPair = isExplicitPair = true;\n this.#scanner.next();\n this.skipSeparationSpace(true, nodeIndent);\n }\n }\n line = this.line;\n const newState = this.composeNode({\n parentIndent: nodeIndent,\n nodeContext: CONTEXT_FLOW_IN,\n allowToSeek: false,\n allowCompact: true\n });\n if (newState) {\n keyTag = newState.tag || null;\n keyNode = newState.result;\n }\n this.skipSeparationSpace(true, nodeIndent);\n ch = this.#scanner.peek();\n if ((isExplicitPair || this.line === line) && ch === COLON) {\n isPair = true;\n this.#scanner.next();\n ch = this.#scanner.peek();\n this.skipSeparationSpace(true, nodeIndent);\n const newState = this.composeNode({\n parentIndent: nodeIndent,\n nodeContext: CONTEXT_FLOW_IN,\n allowToSeek: false,\n allowCompact: true\n });\n if (newState) valueNode = newState.result;\n }\n if (isMapping) {\n this.storeMappingPair(result, overridableKeys, keyTag, keyNode, valueNode);\n } else if (isPair) {\n result.push(this.storeMappingPair({}, overridableKeys, keyTag, keyNode, valueNode));\n } else {\n result.push(keyNode);\n }\n this.skipSeparationSpace(true, nodeIndent);\n ch = this.#scanner.peek();\n if (ch === COMMA) {\n readNext = true;\n this.#scanner.next();\n ch = this.#scanner.peek();\n } else {\n readNext = false;\n }\n }\n throw this.#createError(\"Cannot read flow collection: unexpected end of the stream within a flow collection\");\n }\n // Handles block scaler styles: e.g. 
'|', '>', '|-' and '>-'.\n // https://yaml.org/spec/1.2.2/#81-block-scalar-styles\n readBlockScalar(tag, anchor, nodeIndent) {\n let chomping = CHOMPING_CLIP;\n let didReadContent = false;\n let detectedIndent = false;\n let textIndent = nodeIndent;\n let emptyLines = 0;\n let atMoreIndented = false;\n let ch = this.#scanner.peek();\n let folding = false;\n if (ch === VERTICAL_LINE) {\n folding = false;\n } else if (ch === GREATER_THAN) {\n folding = true;\n } else {\n return;\n }\n let result = \"\";\n let tmp = 0;\n while(ch !== 0){\n this.#scanner.next();\n ch = this.#scanner.peek();\n if (ch === PLUS || ch === MINUS) {\n if (CHOMPING_CLIP === chomping) {\n chomping = ch === PLUS ? CHOMPING_KEEP : CHOMPING_STRIP;\n } else {\n throw this.#createError(\"Cannot read block: chomping mode identifier repeated\");\n }\n } else if ((tmp = decimalCharCodeToNumber(ch)) >= 0) {\n if (tmp === 0) {\n throw this.#createError(\"Cannot read block: indentation width must be greater than 0\");\n } else if (!detectedIndent) {\n textIndent = nodeIndent + tmp - 1;\n detectedIndent = true;\n } else {\n throw this.#createError(\"Cannot read block: indentation width identifier repeated\");\n }\n } else {\n break;\n }\n }\n if (isWhiteSpace(ch)) {\n this.skipWhitespaces();\n this.skipComment();\n ch = this.#scanner.peek();\n }\n while(ch !== 0){\n this.readLineBreak();\n this.lineIndent = 0;\n ch = this.#scanner.peek();\n while((!detectedIndent || this.lineIndent < textIndent) && ch === SPACE){\n this.lineIndent++;\n this.#scanner.next();\n ch = this.#scanner.peek();\n }\n if (!detectedIndent && this.lineIndent > textIndent) {\n textIndent = this.lineIndent;\n }\n if (isEOL(ch)) {\n emptyLines++;\n continue;\n }\n // End of the scalar.\n if (this.lineIndent < textIndent) {\n // Perform the chomping.\n if (chomping === CHOMPING_KEEP) {\n result += \"\\n\".repeat(didReadContent ? 1 + emptyLines : emptyLines);\n } else if (chomping === CHOMPING_CLIP) {\n if (didReadContent) {\n // i.e. 
only if the scalar is not empty.\n result += \"\\n\";\n }\n }\n break;\n }\n // Folded style: use fancy rules to handle line breaks.\n if (folding) {\n // Lines starting with white space characters (more-indented lines) are not folded.\n if (isWhiteSpace(ch)) {\n atMoreIndented = true;\n // except for the first content line (cf. Example 8.1)\n result += \"\\n\".repeat(didReadContent ? 1 + emptyLines : emptyLines);\n // End of more-indented block.\n } else if (atMoreIndented) {\n atMoreIndented = false;\n result += \"\\n\".repeat(emptyLines + 1);\n // Just one line break - perceive as the same line.\n } else if (emptyLines === 0) {\n if (didReadContent) {\n // i.e. only if we have already read some scalar content.\n result += \" \";\n }\n // Several line breaks - perceive as different lines.\n } else {\n result += \"\\n\".repeat(emptyLines);\n }\n // Literal style: just add exact number of line breaks between content lines.\n } else {\n // Keep all line breaks except the header line break.\n result += \"\\n\".repeat(didReadContent ? 
1 + emptyLines : emptyLines);\n }\n didReadContent = true;\n detectedIndent = true;\n emptyLines = 0;\n const captureStart = this.#scanner.position;\n while(!isEOL(ch) && ch !== 0){\n this.#scanner.next();\n ch = this.#scanner.peek();\n }\n const segment = this.captureSegment(captureStart, this.#scanner.position, false);\n if (segment) result += segment;\n }\n if (anchor !== null) this.anchorMap.set(anchor, result);\n return {\n tag,\n anchor,\n kind: \"scalar\",\n result\n };\n }\n readBlockMapping(tag, anchor, nodeIndent, flowIndent) {\n const result = {};\n const overridableKeys = new Set();\n let allowCompact = false;\n let line;\n let pos;\n let keyTag = null;\n let keyNode = null;\n let valueNode = null;\n let atExplicitKey = false;\n let detected = false;\n if (anchor !== null) this.anchorMap.set(anchor, result);\n let ch = this.#scanner.peek();\n while(ch !== 0){\n const following = this.#scanner.peek(1);\n line = this.line; // Save the current line.\n pos = this.#scanner.position;\n //\n // Explicit notation case. There are two separate blocks:\n // first for the key (denoted by \"?\") and second for the value (denoted by \":\")\n //\n if ((ch === QUESTION || ch === COLON) && isWhiteSpaceOrEOL(following)) {\n if (ch === QUESTION) {\n if (atExplicitKey) {\n this.storeMappingPair(result, overridableKeys, keyTag, keyNode, null);\n keyTag = null;\n keyNode = null;\n valueNode = null;\n }\n detected = true;\n atExplicitKey = true;\n allowCompact = true;\n } else if (atExplicitKey) {\n // i.e. 0x3A/* : */ === character after the explicit key.\n atExplicitKey = false;\n allowCompact = true;\n } else {\n throw this.#createError(\"Cannot read block as explicit mapping pair is incomplete: a key node is missed or followed by a non-tabulated empty line\");\n }\n this.#scanner.next();\n ch = following;\n //\n // Implicit notation case. 
Flow-style node as the key first, then \":\", and the value.\n //\n } else {\n const newState = this.composeNode({\n parentIndent: flowIndent,\n nodeContext: CONTEXT_FLOW_OUT,\n allowToSeek: false,\n allowCompact: true\n });\n if (!newState) break; // Reading is done. Go to the epilogue.\n if (this.line === line) {\n ch = this.#scanner.peek();\n this.skipWhitespaces();\n ch = this.#scanner.peek();\n if (ch === COLON) {\n this.#scanner.next();\n ch = this.#scanner.peek();\n if (!isWhiteSpaceOrEOL(ch)) {\n throw this.#createError(\"Cannot read block: a whitespace character is expected after the key-value separator within a block mapping\");\n }\n if (atExplicitKey) {\n this.storeMappingPair(result, overridableKeys, keyTag, keyNode, null);\n keyTag = null;\n keyNode = null;\n valueNode = null;\n }\n detected = true;\n atExplicitKey = false;\n allowCompact = false;\n keyTag = newState.tag;\n keyNode = newState.result;\n } else if (detected) {\n throw this.#createError(\"Cannot read an implicit mapping pair: missing colon\");\n } else {\n const { kind, result } = newState;\n return {\n tag,\n anchor,\n kind,\n result\n }; // Keep the result of `composeNode`.\n }\n } else if (detected) {\n throw this.#createError(\"Cannot read a block mapping entry: a multiline key may not be an implicit key\");\n } else {\n const { kind, result } = newState;\n return {\n tag,\n anchor,\n kind,\n result\n }; // Keep the result of `composeNode`.\n }\n }\n //\n // Common reading code for both explicit and implicit notations.\n //\n if (this.line === line || this.lineIndent > nodeIndent) {\n const newState = this.composeNode({\n parentIndent: nodeIndent,\n nodeContext: CONTEXT_BLOCK_OUT,\n allowToSeek: true,\n allowCompact\n });\n if (newState) {\n if (atExplicitKey) {\n keyNode = newState.result;\n } else {\n valueNode = newState.result;\n }\n }\n if (!atExplicitKey) {\n this.storeMappingPair(result, overridableKeys, keyTag, keyNode, valueNode, line, pos);\n keyTag = keyNode = valueNode = 
null;\n }\n this.skipSeparationSpace(true, -1);\n ch = this.#scanner.peek();\n }\n if (this.lineIndent > nodeIndent && ch !== 0) {\n throw this.#createError(\"Cannot read block: bad indentation of a mapping entry\");\n } else if (this.lineIndent < nodeIndent) {\n break;\n }\n }\n //\n // Epilogue.\n //\n // Special case: last mapping's node contains only the key in explicit notation.\n if (atExplicitKey) {\n this.storeMappingPair(result, overridableKeys, keyTag, keyNode, null);\n }\n // Expose the resulting mapping.\n if (detected) return {\n tag,\n anchor,\n kind: \"mapping\",\n result\n };\n }\n readTagProperty(tag) {\n let isVerbatim = false;\n let isNamed = false;\n let tagHandle = \"\";\n let tagName;\n let ch = this.#scanner.peek();\n if (ch !== EXCLAMATION) return;\n if (tag !== null) {\n throw this.#createError(\"Cannot read tag property: duplication of a tag property\");\n }\n this.#scanner.next();\n ch = this.#scanner.peek();\n if (ch === SMALLER_THAN) {\n isVerbatim = true;\n this.#scanner.next();\n ch = this.#scanner.peek();\n } else if (ch === EXCLAMATION) {\n isNamed = true;\n tagHandle = \"!!\";\n this.#scanner.next();\n ch = this.#scanner.peek();\n } else {\n tagHandle = \"!\";\n }\n let position = this.#scanner.position;\n if (isVerbatim) {\n do {\n this.#scanner.next();\n ch = this.#scanner.peek();\n }while (ch !== 0 && ch !== GREATER_THAN)\n if (!this.#scanner.eof()) {\n tagName = this.#scanner.source.slice(position, this.#scanner.position);\n this.#scanner.next();\n ch = this.#scanner.peek();\n } else {\n throw this.#createError(\"Cannot read tag property: unexpected end of stream\");\n }\n } else {\n while(ch !== 0 && !isWhiteSpaceOrEOL(ch)){\n if (ch === EXCLAMATION) {\n if (!isNamed) {\n tagHandle = this.#scanner.source.slice(position - 1, this.#scanner.position + 1);\n if (!PATTERN_TAG_HANDLE.test(tagHandle)) {\n throw this.#createError(\"Cannot read tag property: named tag handle contains invalid characters\");\n }\n isNamed = true;\n 
position = this.#scanner.position + 1;\n } else {\n throw this.#createError(\"Cannot read tag property: tag suffix cannot contain an exclamation mark\");\n }\n }\n this.#scanner.next();\n ch = this.#scanner.peek();\n }\n tagName = this.#scanner.source.slice(position, this.#scanner.position);\n if (PATTERN_FLOW_INDICATORS.test(tagName)) {\n throw this.#createError(\"Cannot read tag property: tag suffix cannot contain flow indicator characters\");\n }\n }\n if (tagName && !PATTERN_TAG_URI.test(tagName)) {\n throw this.#createError(`Cannot read tag property: invalid characters in tag name \"${tagName}\"`);\n }\n if (isVerbatim) {\n return tagName;\n } else if (this.tagMap.has(tagHandle)) {\n return this.tagMap.get(tagHandle) + tagName;\n } else if (tagHandle === \"!\") {\n return `!${tagName}`;\n } else if (tagHandle === \"!!\") {\n return `tag:yaml.org,2002:${tagName}`;\n }\n throw this.#createError(`Cannot read tag property: undeclared tag handle \"${tagHandle}\"`);\n }\n readAnchorProperty(anchor) {\n let ch = this.#scanner.peek();\n if (ch !== AMPERSAND) return;\n if (anchor !== null) {\n throw this.#createError(\"Cannot read anchor property: duplicate anchor property\");\n }\n this.#scanner.next();\n ch = this.#scanner.peek();\n const position = this.#scanner.position;\n while(ch !== 0 && !isWhiteSpaceOrEOL(ch) && !isFlowIndicator(ch)){\n this.#scanner.next();\n ch = this.#scanner.peek();\n }\n if (this.#scanner.position === position) {\n throw this.#createError(\"Cannot read anchor property: name of an anchor node must contain at least one character\");\n }\n return this.#scanner.source.slice(position, this.#scanner.position);\n }\n readAlias() {\n if (this.#scanner.peek() !== ASTERISK) return;\n this.#scanner.next();\n let ch = this.#scanner.peek();\n const position = this.#scanner.position;\n while(ch !== 0 && !isWhiteSpaceOrEOL(ch) && !isFlowIndicator(ch)){\n this.#scanner.next();\n ch = this.#scanner.peek();\n }\n if (this.#scanner.position === position) {\n 
throw this.#createError(\"Cannot read alias: alias name must contain at least one character\");\n }\n const alias = this.#scanner.source.slice(position, this.#scanner.position);\n if (!this.anchorMap.has(alias)) {\n throw this.#createError(`Cannot read alias: unidentified alias \"${alias}\"`);\n }\n this.skipSeparationSpace(true, -1);\n return this.anchorMap.get(alias);\n }\n resolveTag(state) {\n switch(state.tag){\n case null:\n case \"!\":\n return state;\n case \"?\":\n {\n for (const type of this.implicitTypes){\n // Implicit resolving is not allowed for non-scalar types, and '?'\n // non-specific tag is only assigned to plain scalars. So, it isn't\n // needed to check for 'kind' conformity.\n if (!type.resolve(state.result)) continue;\n // `state.result` updated in resolver if matched\n const result = type.construct(state.result);\n state.result = result;\n state.tag = type.tag;\n const { anchor } = state;\n if (anchor !== null) this.anchorMap.set(anchor, result);\n return state;\n }\n return state;\n }\n }\n const kind = state.kind ?? 
\"fallback\";\n const map = this.typeMap[kind];\n const type = map.get(state.tag);\n if (!type) {\n throw this.#createError(`Cannot resolve unknown tag !<${state.tag}>`);\n }\n if (state.result !== null && type.kind !== state.kind) {\n throw this.#createError(`Unacceptable node kind for !<${state.tag}> tag: it should be \"${type.kind}\", not \"${state.kind}\"`);\n }\n if (!type.resolve(state.result)) {\n // `state.result` updated in resolver if matched\n throw this.#createError(`Cannot resolve a node with !<${state.tag}> explicit tag`);\n }\n const result = type.construct(state.result);\n state.result = result;\n const { anchor } = state;\n if (anchor !== null) this.anchorMap.set(anchor, result);\n return state;\n }\n composeNode({ parentIndent, nodeContext, allowToSeek, allowCompact }) {\n let indentStatus = 1; // 1: this>parent, 0: this=parent, -1: this<parent\n let atNewLine = false;\n const allowBlockScalars = CONTEXT_BLOCK_OUT === nodeContext || CONTEXT_BLOCK_IN === nodeContext;\n let allowBlockCollections = allowBlockScalars;\n const allowBlockStyles = allowBlockScalars;\n if (allowToSeek) {\n if (this.skipSeparationSpace(true, -1)) {\n atNewLine = true;\n indentStatus = getIndentStatus(this.lineIndent, parentIndent);\n }\n }\n let tag = null;\n let anchor = null;\n if (indentStatus === 1) {\n while(true){\n const newTag = this.readTagProperty(tag);\n if (newTag) {\n tag = newTag;\n } else {\n const newAnchor = this.readAnchorProperty(anchor);\n if (!newAnchor) break;\n anchor = newAnchor;\n }\n if (this.skipSeparationSpace(true, -1)) {\n atNewLine = true;\n allowBlockCollections = allowBlockStyles;\n indentStatus = getIndentStatus(this.lineIndent, parentIndent);\n } else {\n allowBlockCollections = false;\n }\n }\n }\n if (allowBlockCollections) {\n allowBlockCollections = atNewLine || allowCompact;\n }\n if (indentStatus === 1) {\n const cond = CONTEXT_FLOW_IN === nodeContext || CONTEXT_FLOW_OUT === nodeContext;\n const flowIndent = cond ? 
parentIndent : parentIndent + 1;\n if (allowBlockCollections) {\n const blockIndent = this.#scanner.position - this.lineStart;\n const blockSequenceState = this.readBlockSequence(tag, anchor, blockIndent);\n if (blockSequenceState) return this.resolveTag(blockSequenceState);\n const blockMappingState = this.readBlockMapping(tag, anchor, blockIndent, flowIndent);\n if (blockMappingState) return this.resolveTag(blockMappingState);\n }\n const flowCollectionState = this.readFlowCollection(tag, anchor, flowIndent);\n if (flowCollectionState) return this.resolveTag(flowCollectionState);\n if (allowBlockScalars) {\n const blockScalarState = this.readBlockScalar(tag, anchor, flowIndent);\n if (blockScalarState) return this.resolveTag(blockScalarState);\n }\n const singleQuoteState = this.readSingleQuotedScalar(tag, anchor, flowIndent);\n if (singleQuoteState) return this.resolveTag(singleQuoteState);\n const doubleQuoteState = this.readDoubleQuotedScalar(tag, anchor, flowIndent);\n if (doubleQuoteState) return this.resolveTag(doubleQuoteState);\n const alias = this.readAlias();\n if (alias) {\n if (tag !== null || anchor !== null) {\n throw this.#createError(\"Cannot compose node: alias node should not have any properties\");\n }\n return this.resolveTag({\n tag,\n anchor,\n kind: null,\n result: alias\n });\n }\n const plainScalarState = this.readPlainScalar(tag, anchor, flowIndent, CONTEXT_FLOW_IN === nodeContext);\n if (plainScalarState) {\n plainScalarState.tag ??= \"?\";\n return this.resolveTag(plainScalarState);\n }\n } else if (indentStatus === 0 && CONTEXT_BLOCK_OUT === nodeContext && allowBlockCollections) {\n // Special case: block sequences are allowed to have same indentation level as the parent.\n // http://www.yaml.org/spec/1.2/spec.html#id2799784\n const blockIndent = this.#scanner.position - this.lineStart;\n const newState = this.readBlockSequence(tag, anchor, blockIndent);\n if (newState) return this.resolveTag(newState);\n }\n const newState = 
this.resolveTag({\n tag,\n anchor,\n kind: null,\n result: null\n });\n if (newState.tag !== null || newState.anchor !== null) return newState;\n }\n readDirectives() {\n let hasDirectives = false;\n let version = null;\n let ch = this.#scanner.peek();\n while(ch !== 0){\n this.skipSeparationSpace(true, -1);\n ch = this.#scanner.peek();\n if (this.lineIndent > 0 || ch !== PERCENT) {\n break;\n }\n hasDirectives = true;\n this.#scanner.next();\n ch = this.#scanner.peek();\n let position = this.#scanner.position;\n while(ch !== 0 && !isWhiteSpaceOrEOL(ch)){\n this.#scanner.next();\n ch = this.#scanner.peek();\n }\n const directiveName = this.#scanner.source.slice(position, this.#scanner.position);\n const directiveArgs = [];\n if (directiveName.length < 1) {\n throw this.#createError(\"Cannot read document: directive name length must be greater than zero\");\n }\n while(ch !== 0){\n this.skipWhitespaces();\n this.skipComment();\n ch = this.#scanner.peek();\n if (isEOL(ch)) break;\n position = this.#scanner.position;\n while(ch !== 0 && !isWhiteSpaceOrEOL(ch)){\n this.#scanner.next();\n ch = this.#scanner.peek();\n }\n directiveArgs.push(this.#scanner.source.slice(position, this.#scanner.position));\n }\n if (ch !== 0) this.readLineBreak();\n switch(directiveName){\n case \"YAML\":\n if (version !== null) {\n throw this.#createError(\"Cannot handle YAML directive: duplication of %YAML directive\");\n }\n version = this.yamlDirectiveHandler(directiveArgs);\n break;\n case \"TAG\":\n this.tagDirectiveHandler(directiveArgs);\n break;\n default:\n this.dispatchWarning(`unknown document directive \"${directiveName}\"`);\n break;\n }\n ch = this.#scanner.peek();\n }\n return hasDirectives;\n }\n readDocument() {\n const documentStart = this.#scanner.position;\n this.checkLineBreaks = false;\n this.tagMap = new Map();\n this.anchorMap = new Map();\n const hasDirectives = this.readDirectives();\n this.skipSeparationSpace(true, -1);\n let result = null;\n if (this.lineIndent 
=== 0 && this.#scanner.peek() === MINUS && this.#scanner.peek(1) === MINUS && this.#scanner.peek(2) === MINUS) {\n this.#scanner.position += 3;\n this.skipSeparationSpace(true, -1);\n } else if (hasDirectives) {\n throw this.#createError(\"Cannot read document: directives end mark is expected\");\n }\n const newState = this.composeNode({\n parentIndent: this.lineIndent - 1,\n nodeContext: CONTEXT_BLOCK_OUT,\n allowToSeek: false,\n allowCompact: true\n });\n if (newState) result = newState.result;\n this.skipSeparationSpace(true, -1);\n if (this.checkLineBreaks && PATTERN_NON_ASCII_LINE_BREAKS.test(this.#scanner.source.slice(documentStart, this.#scanner.position))) {\n this.dispatchWarning(\"non-ASCII line breaks are interpreted as content\");\n }\n if (this.#scanner.position === this.lineStart && this.testDocumentSeparator()) {\n if (this.#scanner.peek() === DOT) {\n this.#scanner.position += 3;\n this.skipSeparationSpace(true, -1);\n }\n } else if (!this.#scanner.eof()) {\n throw this.#createError(\"Cannot read document: end of the stream or a document separator is expected\");\n }\n return result;\n }\n *readDocuments() {\n while(!this.#scanner.eof()){\n yield this.readDocument();\n }\n }\n}\n//# sourceMappingURL=_loader_state.js.map","// Ported from js-yaml v3.13.1:\n// https://github.com/nodeca/js-yaml/commit/665aadda42349dcae869f12040d9b10ef18d12da\n// Copyright 2011-2015 by Vitaly Puzrin. All rights reserved. MIT license.\n// Copyright 2018-2026 the Deno authors. 
MIT license.\n// This module is browser compatible.\nimport { isEOL } from \"./_chars.js\";\nimport { LoaderState } from \"./_loader_state.js\";\nimport { SCHEMA_MAP } from \"./_schema.js\";\nfunction sanitizeInput(input) {\n input = String(input);\n if (input.length > 0) {\n // Add trailing `\\n` if not exists\n if (!isEOL(input.charCodeAt(input.length - 1))) input += \"\\n\";\n // Strip BOM\n if (input.charCodeAt(0) === 0xfeff) input = input.slice(1);\n }\n return input;\n}\n/**\n * Parse and return a YAML string as a parsed YAML document object.\n *\n * Note: This does not support functions. Untrusted data is safe to parse.\n *\n * @example Usage\n * ```ts\n * import { parse } from \"@std/yaml/parse\";\n * import { assertEquals } from \"@std/assert\";\n *\n * const data = parse(`\n * id: 1\n * name: Alice\n * `);\n *\n * assertEquals(data, { id: 1, name: \"Alice\" });\n * ```\n *\n * @throws {SyntaxError} Throws error on invalid YAML.\n * @param content YAML string to parse.\n * @param options Parsing options.\n * @returns Parsed document.\n */ export function parse(content, options = {}) {\n content = sanitizeInput(content);\n const state = new LoaderState(content, {\n ...options,\n schema: SCHEMA_MAP.get(options.schema)\n });\n const documentGenerator = state.readDocuments();\n const document = documentGenerator.next().value;\n if (!documentGenerator.next().done) {\n throw new SyntaxError(\"Found more than 1 document in the stream: expected a single document\");\n }\n return document ?? 
null;\n}\n/**\n * Same as {@linkcode parse}, but understands multi-document YAML sources, and\n * returns multiple parsed YAML document objects.\n *\n * @example Usage\n * ```ts\n * import { parseAll } from \"@std/yaml/parse\";\n * import { assertEquals } from \"@std/assert\";\n *\n * const data = parseAll(`\n * ---\n * id: 1\n * name: Alice\n * ---\n * id: 2\n * name: Bob\n * ---\n * id: 3\n * name: Eve\n * `);\n * assertEquals(data, [ { id: 1, name: \"Alice\" }, { id: 2, name: \"Bob\" }, { id: 3, name: \"Eve\" }]);\n * ```\n *\n * @param content YAML string to parse.\n * @param options Parsing options.\n * @returns Array of parsed documents.\n */ export function parseAll(content, options = {}) {\n content = sanitizeInput(content);\n const state = new LoaderState(content, {\n ...options,\n schema: SCHEMA_MAP.get(options.schema)\n });\n return [\n ...state.readDocuments()\n ];\n}\n//# sourceMappingURL=parse.js.map","/**\n * Core module for @tmpl/core\n *\n * This module exports the core functionality of the @tmpl/core library.\n */ var _computedKey;\nimport * as YAML from \"@jsr/std__yaml\";\n/**\n * Comprehensive list of supported languages\n */ export const LANGUAGES = {\n // Web languages\n html: {\n extension: \"html\",\n aliases: [\n \"htm\"\n ],\n mimeType: \"text/html\",\n description: \"HTML markup language\"\n },\n css: {\n extension: \"css\",\n mimeType: \"text/css\",\n description: \"Cascading Style Sheets\"\n },\n js: {\n extension: \"js\",\n aliases: [\n \"javascript\"\n ],\n mimeType: \"application/javascript\",\n description: \"JavaScript programming language\"\n },\n ts: {\n extension: \"ts\",\n aliases: [\n \"typescript\"\n ],\n mimeType: \"application/typescript\",\n description: \"TypeScript programming language\"\n },\n jsx: {\n extension: \"jsx\",\n mimeType: \"text/jsx\",\n description: \"JavaScript XML\"\n },\n tsx: {\n extension: \"tsx\",\n mimeType: \"text/tsx\",\n description: \"TypeScript XML\"\n },\n // Data formats\n json: {\n 
extension: \"json\",\n mimeType: \"application/json\",\n description: \"JavaScript Object Notation\"\n },\n xml: {\n extension: \"xml\",\n mimeType: \"application/xml\",\n description: \"Extensible Markup Language\"\n },\n yml: {\n extension: \"yaml\",\n aliases: [\n \"yaml\"\n ],\n mimeType: \"application/yaml\",\n description: \"YAML Ain't Markup Language\"\n },\n yaml: {\n extension: \"yaml\",\n aliases: [\n \"yml\"\n ],\n mimeType: \"application/yaml\",\n description: \"YAML Ain't Markup Language\"\n },\n toml: {\n extension: \"toml\",\n mimeType: \"application/toml\",\n description: \"Tom's Obvious, Minimal Language\"\n },\n ini: {\n extension: \"ini\",\n mimeType: \"text/plain\",\n description: \"Configuration file format\"\n },\n csv: {\n extension: \"csv\",\n mimeType: \"text/csv\",\n description: \"Comma-Separated Values\"\n },\n // Markup languages\n md: {\n extension: \"md\",\n aliases: [\n \"markdown\"\n ],\n mimeType: \"text/markdown\",\n description: \"Markdown markup language\"\n },\n tex: {\n extension: \"tex\",\n aliases: [\n \"latex\"\n ],\n mimeType: \"application/x-tex\",\n description: \"LaTeX document preparation system\"\n },\n rst: {\n extension: \"rst\",\n mimeType: \"text/x-rst\",\n description: \"reStructuredText markup language\"\n },\n // Query languages\n sql: {\n extension: \"sql\",\n mimeType: \"application/sql\",\n description: \"Structured Query Language\"\n },\n graphql: {\n extension: \"graphql\",\n aliases: [\n \"gql\"\n ],\n mimeType: \"application/graphql\",\n description: \"GraphQL query language\"\n },\n // Shell scripting\n sh: {\n extension: \"sh\",\n aliases: [\n \"bash\",\n \"shell\"\n ],\n mimeType: \"application/x-sh\",\n description: \"Shell script\"\n },\n ps1: {\n extension: \"ps1\",\n mimeType: \"application/x-powershell\",\n description: \"PowerShell script\"\n },\n bat: {\n extension: \"bat\",\n aliases: [\n \"cmd\"\n ],\n mimeType: \"application/x-bat\",\n description: \"Windows Batch file\"\n },\n // 
Programming languages\n py: {\n extension: \"py\",\n aliases: [\n \"python\"\n ],\n mimeType: \"text/x-python\",\n description: \"Python programming language\"\n },\n rb: {\n extension: \"rb\",\n aliases: [\n \"ruby\"\n ],\n mimeType: \"text/x-ruby\",\n description: \"Ruby programming language\"\n },\n go: {\n extension: \"go\",\n aliases: [\n \"golang\"\n ],\n mimeType: \"text/x-go\",\n description: \"Go programming language\"\n },\n rs: {\n extension: \"rs\",\n aliases: [\n \"rust\"\n ],\n mimeType: \"text/x-rust\",\n description: \"Rust programming language\"\n },\n c: {\n extension: \"c\",\n mimeType: \"text/x-c\",\n description: \"C programming language\"\n },\n cpp: {\n extension: \"cpp\",\n aliases: [\n \"cc\",\n \"cxx\"\n ],\n mimeType: \"text/x-c++\",\n description: \"C++ programming language\"\n },\n cs: {\n extension: \"cs\",\n aliases: [\n \"csharp\"\n ],\n mimeType: \"text/x-csharp\",\n description: \"C# programming language\"\n },\n java: {\n extension: \"java\",\n mimeType: \"text/x-java\",\n description: \"Java programming language\"\n },\n php: {\n extension: \"php\",\n mimeType: \"application/x-php\",\n description: \"PHP programming language\"\n },\n swift: {\n extension: \"swift\",\n mimeType: \"text/x-swift\",\n description: \"Swift programming language\"\n },\n kt: {\n extension: \"kt\",\n aliases: [\n \"kotlin\"\n ],\n mimeType: \"text/x-kotlin\",\n description: \"Kotlin programming language\"\n },\n scala: {\n extension: \"scala\",\n mimeType: \"text/x-scala\",\n description: \"Scala programming language\"\n },\n dart: {\n extension: \"dart\",\n mimeType: \"text/x-dart\",\n description: \"Dart programming language\"\n },\n lua: {\n extension: \"lua\",\n mimeType: \"text/x-lua\",\n description: \"Lua programming language\"\n },\n pl: {\n extension: \"pl\",\n aliases: [\n \"perl\"\n ],\n mimeType: \"text/x-perl\",\n description: \"Perl programming language\"\n },\n r: {\n extension: \"r\",\n mimeType: \"text/x-r\",\n description: \"R 
programming language\"\n },\n elm: {\n extension: \"elm\",\n mimeType: \"text/x-elm\",\n description: \"Elm programming language\"\n },\n fs: {\n extension: \"fs\",\n aliases: [\n \"fsharp\"\n ],\n mimeType: \"text/x-fsharp\",\n description: \"F# programming language\"\n },\n clj: {\n extension: \"clj\",\n aliases: [\n \"clojure\"\n ],\n mimeType: \"text/x-clojure\",\n description: \"Clojure programming language\"\n },\n hs: {\n extension: \"hs\",\n aliases: [\n \"haskell\"\n ],\n mimeType: \"text/x-haskell\",\n description: \"Haskell programming language\"\n },\n // Configuration files\n dockerfile: {\n extension: \"dockerfile\",\n mimeType: \"text/x-dockerfile\",\n description: \"Docker configuration file\"\n },\n makefile: {\n extension: \"makefile\",\n mimeType: \"text/x-makefile\",\n description: \"Make build automation\"\n },\n mk: {\n extension: \"mk\",\n mimeType: \"text/x-makefile\",\n description: \"Make build automation\"\n },\n // Other\n svg: {\n extension: \"svg\",\n mimeType: \"image/svg+xml\",\n description: \"Scalable Vector Graphics\"\n },\n diff: {\n extension: \"diff\",\n aliases: [\n \"patch\"\n ],\n mimeType: \"text/x-diff\",\n description: \"Diff file format\"\n },\n proto: {\n extension: \"proto\",\n mimeType: \"text/x-protobuf\",\n description: \"Protocol Buffers\"\n },\n sol: {\n extension: \"sol\",\n aliases: [\n \"solidity\"\n ],\n mimeType: \"text/x-solidity\",\n description: \"Solidity smart contract language\"\n }\n};\n_computedKey = Symbol.for(\"Deno.customInspect\");\nexport class TemplateClass extends String {\n type;\n raw;\n data;\n error;\n parser;\n constructor(type, template, substitutions, parser, options){\n const substTemplateRaw = [];\n for(let i = 0; i < template.raw.length; i++){\n substTemplateRaw.push(template.raw[i].replace(/\\\\([`$])/g, \"$1\"));\n }\n const raw = String.raw({\n raw: substTemplateRaw\n }, ...substitutions);\n let str = raw.toString();\n // Handle indentation if specified\n if (typeof options?.indent 
=== 'number') {\n if (options.indent > 0) {\n const indent = \" \".repeat(options.indent);\n str = str.replace(/^(?!\\s*$)/gm, indent);\n } else {\n str = str.replace(/^\\s+/gm, (match)=>{\n return match.slice(0, match.length + options.indent);\n });\n }\n }\n if (options?.indent === false) {\n // Get the first non-empty line to determine the indentation\n const lines = str.split(\"\\n\");\n const nonEmptyLine = lines.find((line)=>line.trim() !== \"\") || \"\";\n const indent = nonEmptyLine.match(/^\\s+/)?.[0]?.length || 0;\n str = lines.map((line)=>line.slice(indent)).join(\"\\n\");\n }\n super(str);\n this.type = type;\n this.raw = raw;\n this.parser = parser;\n }\n [_computedKey]() {\n return this.valueOf();\n }\n indent(value) {\n return new TemplateClass(this.type, {\n raw: [\n this.raw\n ]\n }, [], undefined, {\n indent: value\n });\n }\n noindent() {\n return this.indent(false);\n }\n throw() {\n if (this.error) {\n throw this.error;\n }\n return this;\n }\n parse(parser) {\n this.parser = parser ?? 
this.parser;\n try {\n this.data = this.parser?.(this.raw);\n } catch (error) {\n this.error = error;\n }\n return this;\n }\n}\n/**\n * Generic template function for any file extension\n *\n * @param extension The file extension (without the dot)\n * @returns A template tag function for the specified extension\n */ export function tag(type, parser, options) {\n return (template, ...substitutions)=>new TemplateClass(type, template, substitutions, parser, options);\n}\n// Export all template functions individually\n// Web languages\nexport const html = tag(\"html\");\nexport const css = tag(\"css\");\nexport const js = tag(\"js\");\nexport const ts = tag(\"ts\");\nexport const jsx = tag(\"jsx\");\nexport const tsx = tag(\"tsx\");\n// Data formats\nexport const json = tag(\"json\", JSON.parse);\nexport const xml = tag(\"xml\");\nexport const yaml = tag(\"yaml\", YAML.parse);\nexport const toml = tag(\"toml\");\nexport const ini = tag(\"ini\");\nexport const csv = tag(\"csv\");\n// Markup languages\nexport const md = tag(\"md\");\nexport const tex = tag(\"tex\");\nexport const rst = tag(\"rst\");\n// Query languages\nexport const sql = tag(\"sql\");\nexport const graphql = tag(\"graphql\");\n// Shell scripting\nexport const sh = tag(\"sh\");\nexport const ps1 = tag(\"ps1\");\nexport const bat = tag(\"bat\");\n// Programming languages\nexport const py = tag(\"py\");\nexport const rb = tag(\"rb\");\nexport const go = tag(\"go\");\nexport const rs = tag(\"rs\");\nexport const c = tag(\"c\");\nexport const cpp = tag(\"cpp\");\nexport const cs = tag(\"cs\");\nexport const java = tag(\"java\");\nexport const php = tag(\"php\");\nexport const swift = tag(\"swift\");\nexport const kt = tag(\"kt\");\nexport const scala = tag(\"scala\");\nexport const dart = tag(\"dart\");\nexport const lua = tag(\"lua\");\nexport const pl = tag(\"pl\");\nexport const r = tag(\"r\");\nexport const elm = tag(\"elm\");\nexport const fs = tag(\"fs\");\nexport const clj = tag(\"clj\");\nexport 
const hs = tag(\"hs\");\n// Configuration files\nexport const dockerfile = tag(\"dockerfile\");\nexport const makefile = tag(\"makefile\");\nexport const mk = tag(\"mk\");\n// Other\nexport const svg = tag(\"svg\");\nexport const diff = tag(\"diff\");\nexport const proto = tag(\"proto\");\nexport const sol = tag(\"sol\");\n//# sourceMappingURL=mod.js.map","import { tsx } from \"@tmpl/core\";\n\nexport const createDocumentFileTemplate = tsx`\nimport type { DocumentModelModule } from \"document-model\";\nimport {\n showCreateDocumentModal,\n useAllowedDocumentModelModules,\n} from \"@powerhousedao/reactor-browser\";\n\n/**\n * Document creation UI component.\n * Displays available document types as clickable buttons.\n */\nexport function CreateDocument() {\n const allowedDocumentModelModules = useAllowedDocumentModelModules();\n\n return (\n <div>\n {/* Customize section title here */}\n <h3 className=\"mb-3 mt-4 text-sm font-bold text-gray-600\">\n Create document\n </h3>\n {/* Customize layout by changing flex-wrap, gap, or grid layout */}\n <div className=\"flex w-full flex-wrap gap-4\">\n {allowedDocumentModelModules?.map((documentModelModule) => {\n return (\n <CreateDocumentButton\n key={documentModelModule.documentModel.global.id}\n documentModelModule={documentModelModule}\n />\n );\n })}\n </div>\n </div>\n );\n}\n\ntype Props = {\n documentModelModule: DocumentModelModule;\n};\nfunction CreateDocumentButton({ documentModelModule }: Props) {\n const documentType = documentModelModule.documentModel.global.id;\n const documentModelName =\n documentModelModule.documentModel.global.name || documentType;\n const documentModelDescription =\n documentModelModule.documentModel.global.description;\n return (\n <button\n className=\"cursor-pointer rounded-md bg-gray-200 py-2 px-3 hover:bg-gray-300\"\n title={documentModelName}\n aria-description={documentModelDescription}\n onClick={() => showCreateDocumentModal(documentType)}\n >\n {documentModelName}\n </button>\n 
);\n}\n`.raw;\n","import { tsx } from \"@tmpl/core\";\n\nexport const appDriveContentsFileTemplate = () =>\n tsx`\nimport { CreateDocument } from \"./CreateDocument.js\";\nimport { EmptyState } from \"./EmptyState.js\";\nimport { Files } from \"./Files.js\";\nimport { Folders } from \"./Folders.js\";\nimport { NavigationBreadcrumbs } from \"./NavigationBreadcrumbs.js\";\n\n/** Shows the documents and folders in the selected drive */\nexport function DriveContents() {\n return (\n <div className=\"space-y-6 px-6\">\n <NavigationBreadcrumbs />\n <Folders />\n <Files />\n <EmptyState />\n <CreateDocument />\n </div>\n );\n}\n\n\n`.raw;\n","import { tsx } from \"@tmpl/core\";\n\nexport const driveExplorerFileTemplate = tsx`\nimport type { EditorProps } from \"document-model\";\nimport { FolderTree } from \"./FolderTree.js\";\nimport { DriveContents } from \"./DriveContents.js\";\n\n/**\n * Main drive explorer component with sidebar navigation and content area.\n * Layout: Left sidebar (folder tree) + Right content area (files/folders + document editor)\n */\nexport function DriveExplorer({ children }: EditorProps) {\n // if a document is selected then it's editor will be passed as children\n const showDocumentEditor = !!children;\n\n return (\n <div className=\"flex h-full\">\n <FolderTree />\n <div className=\"flex-1 overflow-y-auto p-4\">\n {/* Conditional rendering: Document editor or folder contents */}\n {showDocumentEditor ? 
(\n /* Document editor view */\n children\n ) : (\n /* Folder contents view */\n <DriveContents />\n )}\n </div>\n </div>\n );\n}\n`.raw;\n","import { tsx } from \"@tmpl/core\";\n\nexport const emptyStateFileTemplate = tsx`\nimport { useNodesInSelectedDriveOrFolder } from \"@powerhousedao/reactor-browser\";\n\n/** Shows a message when the selected drive or folder is empty */\nexport function EmptyState() {\n const nodes = useNodesInSelectedDriveOrFolder();\n const hasNodes = nodes.length > 0;\n if (hasNodes) return null;\n\n return (\n <div className=\"py-12 text-center text-gray-500\">\n <p className=\"text-lg\">This folder is empty</p>\n <p className=\"mt-2 text-sm\">Create your first document or folder below</p>\n </div>\n );\n}\n`.raw;\n","import { tsx } from \"@tmpl/core\";\n\nexport const appFilesFileTemplate = () =>\n tsx`\nimport { FileItem } from \"@powerhousedao/design-system/connect\";\nimport {\n useNodesInSelectedDriveOrFolder,\n isFileNodeKind,\n} from \"@powerhousedao/reactor-browser\";\n\n/** Shows the files in the selected drive or folder */\nexport function Files() {\n const nodes = useNodesInSelectedDriveOrFolder();\n const fileNodes = nodes.filter((n) => isFileNodeKind(n));\n const hasFiles = fileNodes.length > 0;\n\n if (!hasFiles) return null;\n\n return (\n <div>\n <h3 className=\"mb-2 text-sm font-semibold text-gray-600\">Documents</h3>\n <div className=\"flex flex-wrap gap-4\">\n {fileNodes.map((fileNode) => (\n <FileItem key={fileNode.id} fileNode={fileNode} />\n ))}\n </div>\n </div>\n );\n}\n\n`.raw;\n","import { tsx } from \"@tmpl/core\";\n\nexport const appFoldersFileTemplate = () =>\n tsx`\nimport { FolderItem } from \"@powerhousedao/design-system/connect\";\nimport {\n useNodesInSelectedDriveOrFolder,\n isFolderNodeKind,\n} from \"@powerhousedao/reactor-browser\";\n\n/** Shows the folders in the selected drive or folder */\nexport function Folders() {\n const nodes = useNodesInSelectedDriveOrFolder();\n const folderNodes = 
nodes.filter((n) => isFolderNodeKind(n));\n const hasFolders = folderNodes.length > 0;\n if (!hasFolders) return null;\n\n return (\n <div>\n <h3 className=\"mb-2 text-sm font-bold text-gray-600\">Folders</h3>\n <div className=\"flex flex-wrap gap-4\">\n {folderNodes.map((folderNode) => (\n <FolderItem key={folderNode.id} folderNode={folderNode} />\n ))}\n </div>\n </div>\n );\n}\n`.raw;\n","import { tsx } from \"@tmpl/core\";\n\nexport const folderTreeFileTemplate = tsx`\n import {\n Sidebar,\n SidebarProvider,\n type SidebarNode,\n} from \"@powerhousedao/document-engineering\";\nimport {\n setSelectedNode,\n useNodesInSelectedDrive,\n useSelectedDrive,\n useSelectedNode,\n} from \"@powerhousedao/reactor-browser\";\nimport type { Node } from \"@powerhousedao/shared/document-drive\";\nimport { useMemo } from \"react\";\n\nfunction buildSidebarNodes(\n nodes: Node[],\n parentId: string | null | undefined,\n): SidebarNode[] {\n return nodes\n .filter((n) => {\n if (parentId == null) {\n return n.parentFolder == null;\n }\n return n.parentFolder === parentId;\n })\n .map((node): SidebarNode => {\n if (node.kind === \"folder\") {\n return {\n id: node.id,\n title: node.name,\n icon: \"FolderClose\" as const,\n expandedIcon: \"FolderOpen\" as const,\n children: buildSidebarNodes(nodes, node.id),\n };\n }\n return {\n id: node.id,\n title: node.name,\n icon: \"File\" as const,\n };\n });\n}\n\nfunction transformNodesToSidebarNodes(\n nodes: Node[],\n driveName: string,\n): SidebarNode[] {\n return [\n {\n id: \"root\",\n title: driveName,\n icon: \"Drive\" as const,\n children: buildSidebarNodes(nodes, null),\n },\n ];\n}\n\n/**\n * Hierarchical folder tree navigation component using Sidebar from document-engineering.\n * Displays folders and files in a tree structure with expand/collapse functionality, search, and resize support.\n */\nexport function FolderTree() {\n const [selectedDrive] = useSelectedDrive();\n const nodes = useNodesInSelectedDrive();\n const 
selectedNode = useSelectedNode();\n const driveName = selectedDrive.header.name;\n // Transform Node[] to hierarchical SidebarNode structure\n const sidebarNodes = useMemo(\n () => transformNodesToSidebarNodes(nodes || [], driveName),\n [nodes, driveName],\n );\n\n const handleActiveNodeChange = (node: SidebarNode) => {\n // If root node is selected, pass undefined to match existing behavior\n if (node.id === \"root\") {\n setSelectedNode(undefined);\n } else {\n setSelectedNode(node.id);\n }\n };\n // Map selectedNodeId to activeNodeId (use \"root\" when undefined)\n const activeNodeId =\n !selectedNode || selectedNode.id === selectedDrive.header.id\n ? \"root\"\n : selectedNode.id;\n\n return (\n <SidebarProvider nodes={sidebarNodes}>\n <Sidebar\n className=\"pt-1\"\n nodes={sidebarNodes}\n activeNodeId={activeNodeId}\n onActiveNodeChange={handleActiveNodeChange}\n sidebarTitle=\"Drive Explorer\"\n showSearchBar={true}\n resizable={true}\n allowPinning={false}\n showStatusFilter={false}\n initialWidth={256}\n defaultLevel={2}\n />\n </SidebarProvider>\n );\n}\n`.raw;\n","import { tsx } from \"@tmpl/core\";\n\nexport const driveExplorerNavigationBreadcrumbsFileTemplate = () =>\n tsx`\nimport { Breadcrumbs } from \"@powerhousedao/design-system/connect\";\n\n/** Shows the navigation breadcrumbs for the selected drive or folder */\nexport function NavigationBreadcrumbs() {\n return (\n <div className=\"border-b border-gray-200 pb-3 space-y-3\">\n <Breadcrumbs />\n </div>\n );\n}\n`.raw;\n","import { ts } from \"@tmpl/core\";\n\nexport const appConfigFileTemplate = (v: {\n allowedDocumentTypesString: string;\n isDragAndDropEnabledString: string;\n}) =>\n ts`\nimport type { PHAppConfig } from \"@powerhousedao/reactor-browser\";\n\nexport const editorConfig: PHAppConfig = {\n isDragAndDropEnabled: ${v.isDragAndDropEnabledString},\n allowedDocumentTypes: ${v.allowedDocumentTypesString}\n};\n`.raw;\n","import { tsx } from \"@tmpl/core\";\n\nexport const 
appEditorFileTemplate = () =>\n tsx`\n/**\n * WARNING: DO NOT EDIT\n * This file is auto-generated and updated by codegen\n */\nimport { useSetPHAppConfig } from \"@powerhousedao/reactor-browser\";\nimport type { EditorProps } from \"document-model\";\nimport { DriveExplorer } from \"./components/DriveExplorer.js\";\nimport { editorConfig } from \"./config.js\";\n\n/** Editor component for the app */\nexport default function Editor(props: EditorProps) {\n // set the config for this app\n // you can update these configs in \\`./config.ts\\`\n useSetPHAppConfig(editorConfig);\n return (\n <div className=\"bg-gray-50 p-6\">\n <DriveExplorer {...props} />\n </div>\n );\n}\n`.raw;\n","import { md } from \"@tmpl/core\";\n\nexport const agentsTemplate = md`\n# Powerhouse Document Models Assistant\n\nThis project creates document models, editors, processors and subgraphs for the Powerhouse ecosystem. Your role is to help users create these modules based on their needs.\n\n## Core Concepts\n\n- **Document Model**: A template for creating documents. Defines schema and allowed operations for a document type.\n- **Document**: An instance of a document model containing actual data that follows the model's structure and can be modified using operations.\n- **Drive**: A document of type \"powerhouse/document-drive\" representing a collection of documents and folders. Add documents using \"addActions\" with \"ADD_FILE\" action.\n- **Action**: A proposed change to a document (JSON object with action name and input). Dispatch using \"addActions\" tool.\n- **Operation**: A completed change to a document containing the action plus metadata (index, timestamp, hash, errors). Actions become operations after dispatch.\n\n## Technology Primer\n\n- **Reactor**: The core Powerhouse engine. It is modular and storage-agnostic, loads document models at runtime, and synchronizes documents across nodes via drives.\n- **Reactor Package**: A deployable bundle that extends the Reactor. 
It contains one or more document models, editors, processors, and subgraphs. A Vetra project generates a Reactor Package.\n- **Connect**: The Powerhouse web application for document management. End users open Connect to browse drives, create documents, and interact with editors.\n- **Switchboard**: The Powerhouse API service. It exposes GraphQL and MCP endpoints so external tools can read/write documents programmatically.\n- **Vetra**: The local development environment for building Reactor Packages. It includes Vetra Studio (a local Connect instance) and Vetra Switchboard (a local Switchboard with reactor-mcp). Start it with \\`ph vetra\\`.\n\n## CRITICAL: MCP Tool Usage Rules\n\n**MANDATORY**: The \\`reactor-mcp\\` MUST BE USED when handling documents or document-models for the Powerhouse/Vetra ecosystem.\nIf the \\`reactor-mcp\\` server is unavailable, ask the user to run \\`ph vetra\\` on a separate terminal to start the server and try to reconnect to the MCP server, DO NOT run it yourself.\n\n### Key Requirements:\n\n- Never set document IDs manually - they're auto-generated by 'createDocument'\n- Minimize \"addActions\" calls by batching multiple actions together\n- Add new document model documents to \"vetra-{hash}\" drive unless specified otherwise\n- Always check document model schema before calling addActions\n- Use MCP tools for ALL document and document-model operations\n\n## Document Model Creation Workflow\n\n### 1. 
Planning Phase\n\n**MANDATORY**: Present your proposal to the user and ask for confirmation before implementing ANY document model.\n\n- **ALWAYS** describe the proposed document model structure (state schema, operations, modules) before creating\n- **NEVER** proceed with implementation without explicit user approval of your proposal\n- When in doubt, ask for clarification\n- Break complex models into logical modules and operations\n\n#### Document Type ID Format\n\n- **Type ID**: \\`{organization}/{document-type-name}\\` (e.g., \\`pizza-plaza/order\\`, \\`acme/invoice\\`)\n- **File extension**: 2-4 characters with leading dot (e.g., \\`.ordr\\`, \\`.inv\\`)\n- **Name**: Must match \\`/[a-zA-Z][a-zA-Z0-9 ]*/\\` — human-readable, capitalized (e.g., \\`\"Order\"\\`, \\`\"Invoice\"\\`)\n\n### 2. Pre-Implementation Requirements\n\n**MANDATORY**: Check document model schema before making any MCP tool calls.\n\n- **ALWAYS** use \\`mcp__reactor-mcp__getDocumentModelSchema\\` with \\`type: \"powerhouse/document-model\"\\` first\n- Review input schema requirements for operations like \\`ADD_MODULE\\`, \\`ADD_OPERATION\\`, etc.\n- Ensure all required parameters (like \\`id\\` or \\`scope\\` fields) are included in action inputs\n- This prevents failed tool calls and reduces iteration\n\n### 3. Implementation Requirements\n\n- Document model reducers must be **pure synchronous functions**\n- Reducers receive current state and operation, always returning the same result\n- Values like dates/IDs must come from operation input, not generated in reducer\n- Reducer code goes into SET_OPERATION_REDUCER action (no function header needed)\n- Reducers are wrapped with Mutative - you can mutate the state object directly\n- External imports go at the beginning of the actual reducer file in \\`src/\\`\n- Ensure that the reducer code of each operation in the document model schema is applied in \\`document-models/<document-model-name>/src/reducers/<module-name>.ts\\`\n\n### 4. 
Quality assurance\n\nAfter doing changes to the code, or after creating a new document model or a new editor, _YOU MUST RUN_ the following commands to check for errors in your implementation:\n\n- **TypeScript Check**: Run \\`npm run tsc\\` to validate type safety\n- **ESLint Check**: Run \\`npm run lint:fix\\` to check for errors with ESLint\n\n## Document editor creation flow\n\n**CRITICAL**: Creating a document editor is a **two-phase** process. You must NEVER skip Phase 1 or try to manually create editor files from scratch. The codegen system generates the boilerplate — your job is only to implement the UI inside it.\n\n### Phase 1: Create the editor document via MCP (MANDATORY FIRST STEP)\n\n**NEVER** start by writing editor code, creating component files, or looking at how to scaffold an editor manually. The **only** way to create a new editor is through the MCP tools:\n\n1. Check if the document editor already exists. If it does, ask the user if a new one should be created or if the existing one should be reimplemented\n2. If it's a new editor, get the document editor schema using \\`mcp__reactor-mcp__getDocumentModelSchema\\` with \\`type: \"powerhouse/document-editor\"\\`\n3. Create a new editor document on the \\`vetra-{hash}\\` drive of type \\`powerhouse/document-editor\\` using \\`mcp__reactor-mcp__addActions\\` with the \\`ADD_FILE\\` action\n4. Configure the editor document with the required actions (set the editor name, target document model, etc.) according to the schema\n\n⚠️ **The editor document MUST be confirmed/published — if it is left as draft state, the codegen will NOT run and no editor files will be generated.** Make sure the document state is not \"DRAFT\" after creation.\n\n5. 
Once the editor document is confirmed on the drive, the codegen automatically runs and generates boilerplate files in the \\`editors/\\` folder, including hooks, type definitions, and the editor component shell\n\n### Phase 2: Implement the editor UI\n\nOnly **after** the codegen has produced the boilerplate files, proceed with the UI implementation:\n\n- Inspect the generated files in the \\`editors/\\` folder — do NOT create new files for the main editor component; edit the generated one\n- Inspect the hooks in \\`editors/hooks\\` as they should be useful\n- Read the schema of the document model that the editor is for to know how to interact with it\n- Every editor **MUST** include \\`<DocumentToolbar />\\` imported from \\`@powerhousedao/design-system/connect/index\\`. Place it at the top of the editor component — do not put anything next to it.\n- Style the editor using tailwind classes or a style tag. If using a style tag, make sure to make the selectors specific to only apply to the editor component.\n- Create modular components for the UI elements and place them on separate files to make it easier to maintain and update\n- Consider using the React Components exported by \\`@powerhousedao/design-system\\` and \\`@powerhousedao/document-engineering\\`\n- Separate business logic from presentation logic\n- Use TypeScript for type safety, avoid using any and type casting\n- Always check for type and lint errors after creating or modifying the editor\n- **CRITICAL**: After creating a new editor, verify that \\`editors/editors.ts\\` includes the new editor module. The codegen should update this file automatically, but if it doesn't, manually add the import and include the editor in the \\`editors\\` array. Without this registration, Connect won't find an editor for the document type. 
Example:\n\n ~~~typescript\n import type { EditorModule } from \"document-model\";\n import { TodoListEditor } from \"./todo-list-editor/module.js\";\n\n export const editors: EditorModule[] = [TodoListEditor];\n ~~~\n\n### Document Editor Implementation Pattern\n\n**CRITICAL**: When implementing document editors, use the modern React hooks pattern from \\`@powerhousedao/reactor-browser\\`.\n\nThe following section is valid for editors that edit a single document type.\n\n#### Required Imports and Setup\n\nUsing a \"Todo\" document model as example:\n\n~~~typescript\nimport { generateId } from \"document-model\";\nimport { useSelectedTodoDocument } from \"../hooks/useTodoDocument.js\";\nimport {\n addTodo,\n} from \"../../document-models/todo/gen/creators.js\";\n\nexport default function Editor() {\n const [document, dispatch] = useSelectedTodoDocument();\n\n function handleAddTodo(values: { title: string }) {\n if (values.title) {\n dispatch(addTodo({ id: generateId(), title: values.title }));\n }\n };\n\n// Note: The \\`useSelectedTodoDocument\\` hook is auto-generated. 
Check the \\`editors/hooks\\` folder for the exact hook name.\n// Action creators like \\`addTodo\\` are exported from the document model's \\`gen/creators.js\\` file.\n~~~\n\nThe \\`useSelectedTodoDocument\\` gets generated automatically so you don't need to implement it yourself.\n\n## ⚠️ CRITICAL: Generated Files & Modification Rules\n\n### Generated Files Rule\n\n**NEVER edit files in \\`gen/\\` folders** - they are auto-generated and will be overwritten.\n\n### Document Model Modification Process\n\nFor ANY document model changes, follow this **mandatory** two-step process:\n\n#### Step 1: Update Document Model via MCP\n\nUse \\`mcp__reactor-mcp__addActions\\` with operations like:\n\n- \\`SET_OPERATION_SCHEMA\\` - update input/output schemas\n- \\`SET_OPERATION_REDUCER\\` - update reducer code\n- \\`SET_STATE_SCHEMA\\` - update state definitions\n\n#### Step 2: Update Existing Source Files\n\n**ALSO manually update existing reducer files in \\`src/\\` folder** - these are NOT auto-generated.\nMake sure to check if the operation reducer code needs to be updated after changing the state schema.\n\n### ⚠️ Critical Reminder\n\n**ALWAYS do BOTH steps when fixing reducer issues:**\n\n1. ✅ Fix existing reducer files in \\`src/\\` manually\n2. 
✅ Update document model via MCP with same fixes\n\n**Forgetting step 2 means future code generations will still contain the bugs!**\n\n## Reducer Implementation Guidelines\n\n### ❌ Forbidden in Reducers (Non-Deterministic)\n\n- \\`crypto.randomUUID()\\`, \\`Math.random()\\`, \\`Date.now()\\`, \\`new Date()\\`\n- External API calls or side effects\n- Asynchronous functions\n- Any non-deterministic functions\n\n### ❌ Forbidden Patterns\n\n~~~typescript\n// NEVER use fallback values with non-deterministic functions\nid: action.input.id || crypto.randomUUID(); // ❌ FORBIDDEN\ntimestamp: action.input.timestamp || new Date(); // ❌ FORBIDDEN\n~~~\n\n### ✅ Required Pattern\n\nAll dynamic values must come from action input:\n\n- **IDs**: Include \\`id: OID!\\` in input schema, use \\`action.input.id\\` in reducer\n- **Timestamps**: Include \\`timestamp: DateTime!\\` in input schema\n- **Computed values**: Calculate before dispatching action\n\n### Example\n\n~~~typescript\n// ❌ BAD - impure reducer\nconst newItem = {\n id: crypto.randomUUID(), // Non-deterministic\n createdAt: new Date(), // Non-deterministic\n};\n\n// ✅ GOOD - pure reducer\nconst newItem = {\n id: action.input.id, // From action input\n createdAt: action.input.createdAt, // From action input\n};\n~~~\n\n### Handling Nullable Input Types\n\n**CRITICAL**: Be careful when handling optional input types:\n\n- Optional input types use \\`InputMaybe<T>\\` allowing \\`null | undefined | T\\`.\n- Optional state types use \\`Maybe<T>\\` = \\`T | null\\`.\n- If there is no applicable default value then use \\`|| null\\`.\n\n~~~typescript\n// ❌ BAD - Type error with Maybe<string>\namount: action.input.amount,\nnotes: action.input.notes,\n\n// ✅ GOOD - Matches Maybe<T> = T | null\namount: action.input.amount || null,\nnotes: action.input.notes || [],\n~~~\n\nUse truthy checks when conditionally assigning optional values from input to state:\n\n~~~typescript\n// ❌ BAD - Type 'string | null' is not assignable to type 
'string'.\nif (action.input.field !== undefined) entry.field = action.input.field;\n\n// ✅ GOOD - use truthy checks\nif (action.input.field) state.field = action.input.field;\n\n// ✅ GOOD - For booleans use explicit null/undefined checks\nif (action.input.field !== undefined && action.input.field !== null)\n state.field = action.input.field;\n~~~\n\n### Error Handling in Operations\n\n**MANDATORY**: Define specific error types for each operation to handle invalid inputs and edge cases properly.\nAction inputs are validated so they are guaranteed to respect the input schema.\nErrors referenced in the reducer code will be imported automatically.\n\n#### Error Definition Requirements\n\n1. **Add error definitions** to operations using \\`ADD_OPERATION_ERROR\\`:\n - \\`errorCode\\`: Uppercase snake_case (e.g., \\`\"MISSING_ID\"\\`, \\`\"ENTRY_NOT_FOUND\"\\`)\n - \\`errorName\\`: PascalCase ending with \"Error\" (e.g., \\`\"MissingIdError\"\\`, \\`\"EntryNotFoundError\"\\`)\n - \\`errorDescription\\`: Human-readable description of the error condition\n\n2. **Error names must end with \"Error\"** for consistency and code generation\n\n3. **Use specific error types** rather than generic validation\n\n4. 
**Must use unique error names and ids**\n\n#### Error Usage in Reducers\n\n~~~typescript\n// ✅ GOOD - Throw specific errors by name\nif (!action.input.id) {\n throw new MissingIdError(\"ID is required for operation\");\n}\n\nif (entryIndex === -1) {\n throw new EntryNotFoundError(\\`Entry not found\\`);\n}\n\n// ❌ BAD - Generic Error\nthrow new Error(\"Something went wrong\");\n\n// ❌ BAD - Nested error access\nthrow new errors.ModuleName.MissingIdError(\"message\");\n\n// ❌ BAD - Do not import error classes in the reducer code,\nimport { MissingIdError } from \"../../gen/module-name/error.js\";\n\n// ✅ GOOD - Simply reference the error and it will be imported automatically\nthrow new MissingIdError(\"message\");\n~~~\n\n#### Common Error Patterns\n\n- **EntityNotFoundError**: Referenced entity doesn't exist\n- **DuplicateIdError**: ID already exists when creating new entries\n- **InvalidInputError**: Business logic violations\n- **PermissionDeniedError**: Access control violations\n\n#### Testing Reducer Errors\n\n**CRITICAL**: When a reducer throws an error, the operation is **still added** to the document but with an \\`.error\\` property containing the error message as a string.\n\n**DO NOT** use \\`.toThrow()\\` or \\`expect(() => ...).toThrow()\\` patterns when testing reducer errors.\n\n##### How Errors Work in Operations\n\n1. The reducer throws an error (e.g., \\`throw new InvalidNameError(\"message\")\\`)\n2. The operation is still recorded in \\`document.operations.global\\` (or \\`.local\\`)\n3. The error message is stored in \\`operation.error\\` as a string\n4. **The state is NOT mutated** - it remains unchanged from before the operation\n\n##### Accessing the Correct Operation Index\n\n**CRITICAL**: You must access the correct operation index. 
The index corresponds to how many operations were dispatched before it.\n\n- If this is the **first** operation dispatched, access \\`[0]\\`\n- If 3 operations were dispatched **before** the failing one, access \\`[3]\\`\n\n##### Example\n\n~~~typescript\nit(\"should return error and not mutate state\", () => {\n const document = utils.createDocument();\n const initialState = document.state.global.name;\n\n const updatedDocument = reducer(document, setName({ name: \"invalid\" }));\n\n // Access the correct operation index (0 = first operation)\n expect(updatedDocument.operations.global[0].error).toBe(\n \"Name is not allowed\",\n );\n // State remains unchanged\n expect(updatedDocument.state.global.name).toBe(initialState);\n});\n\n// ❌ WRONG - Never use toThrow()\nexpect(() => reducer(document, setName({ name: \"invalid\" }))).toThrow();\n~~~\n\n## Document Model Structure\n\n### Core Components\n\n- **Basic Metadata**: \\`id\\`, \\`name\\`, \\`extension\\`, \\`description\\`, \\`author\\` (name + website)\n- **Specifications**: Versioned specs with \\`version\\`, \\`changeLog\\`, \\`state\\` (global/local with schema, initialValue, examples)\n- **Modules**: Operational modules containing their operations\n\n## Available Document Model Operations (37 total)\n\n| Category | Operations | Count |\n| -------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----- |\n| **Header Management** | \\`SET_MODEL_NAME\\`, \\`SET_MODEL_ID\\`, \\`SET_MODEL_EXTENSION\\`, \\`SET_MODEL_DESCRIPTION\\`, \\`SET_AUTHOR_NAME\\`, \\`SET_AUTHOR_WEBSITE\\` | 6 |\n| **Versioning** | ⚠️ **DO NOT USE** - Not implemented | 0 |\n| **Module Management** | \\`ADD_MODULE\\`, \\`SET_MODULE_NAME\\`, \\`SET_MODULE_DESCRIPTION\\`, \\`DELETE_MODULE\\`, \\`REORDER_MODULES\\` | 5 |\n| **Operation Management** | 
\\`ADD_OPERATION\\`, \\`SET_OPERATION_NAME\\`, \\`SET_OPERATION_SCHEMA\\`, \\`SET_OPERATION_DESCRIPTION\\`, \\`SET_OPERATION_TEMPLATE\\`, \\`SET_OPERATION_REDUCER\\`, \\`MOVE_OPERATION\\`, \\`DELETE_OPERATION\\`, \\`REORDER_MODULE_OPERATIONS\\` | 9 |\n| **Operation Error Management** | \\`ADD_OPERATION_ERROR\\`, \\`SET_OPERATION_ERROR_CODE\\`, \\`SET_OPERATION_ERROR_NAME\\`, \\`SET_OPERATION_ERROR_DESCRIPTION\\`, \\`SET_OPERATION_ERROR_TEMPLATE\\`, \\`DELETE_OPERATION_ERROR\\`, \\`REORDER_OPERATION_ERRORS\\` | 7 |\n| **Operation Example Management** | \\`ADD_OPERATION_EXAMPLE\\`, \\`UPDATE_OPERATION_EXAMPLE\\`, \\`DELETE_OPERATION_EXAMPLE\\`, \\`REORDER_OPERATION_EXAMPLES\\` | 4 |\n| **State Management** | \\`SET_STATE_SCHEMA\\`, \\`SET_INITIAL_STATE\\`, \\`ADD_STATE_EXAMPLE\\`, \\`UPDATE_STATE_EXAMPLE\\`, \\`DELETE_STATE_EXAMPLE\\`, \\`REORDER_STATE_EXAMPLES\\` | 6 |\n\n## Best Practices & Design Principles\n\n### Scope Selection\n\n- **\\`scope: \"global\"\\`**: State shared among all users with document access\n- **\\`scope: \"local\"\\`**: State private to each individual user\n\n### Operation Design\n\n- Use descriptive operation names (e.g., \\`ADD_LINE_ITEM\\`, \\`UPDATE_RECIPIENT\\`)\n- One operation per user intent (separate concerns)\n- Include comprehensive examples and error definitions\n- Organize related operations into logical modules\n\n## GraphQL Schema Guidelines\n\n### Document State Schema\n\n- **Most fields optional** to support creating empty documents\n- Use required fields \\`!\\` only when absolutely necessary\n- Defaults handled by operations, not schema\n\n#### Mandatory vs Optional Field Rules\n\nA user must always be able to create an **empty document** without providing any information. 
This drives the following rules:\n\n- **Root type properties** can only be mandatory (\\`!\\`) if they have a logical default value (e.g., empty array, enum initial status)\n- **Collections** should always use \\`[Type!]!\\` — inner \\`!\\` means no nulls in the array, outer \\`!\\` means the array itself defaults to empty\n- **Child object fields** can be mandatory only if all their required properties also have logical defaults\n- Use \\`enum\\` types for workflow statuses (e.g., \\`status: OrderStatus!\\` where the enum has an initial value like \\`DRAFT\\`)\n\n### ⚠️ CRITICAL: State Type Naming Convention\n\n**MANDATORY**: The global state type name MUST follow this exact pattern:\n\n~~~graphql\ntype <DocumentModelName>State {\n # your fields here\n}\n~~~\n\n**DO NOT** append \"Global\" to the state type name, even when defining global state:\n\n~~~graphql\n// ❌ WRONG - Do not use \"GlobalState\" suffix\ntype TodoListGlobalState {\n todos: [Todo!]!\n}\n\n// ✅ CORRECT - Use only \"State\" suffix\ntype TodoListState {\n todos: [Todo!]!\n}\n\n// ✅ CORRECT - Use \"LocalState\" suffix for Local scope\ntype TodoListLocalState {\n localTodos: [Todo!]!\n}\n~~~\n\n**Why this matters:**\n\n- The code generator expects the type to be named \\`<DocumentModelName>State\\`\n- Using \\`GlobalState\\` or \\`LocalState\\` suffix will cause TypeScript compilation errors\n- This applies when using \\`SET_STATE_SCHEMA\\` with \\`scope: \"global\"\\`\n\n**Rule**: For global state, the type should be \\`<DocumentModelName>State\\`. 
For local state (if needed), the type name should be \\`<DocumentModelName>LocalState\\`.\n\n### Available Scalar Types\n\n| Standard | Custom Identity | Custom Amounts | Custom Specialized |\n| --------- | ---------------------- | ------------------- | ------------------ |\n| \\`String\\` | \\`OID\\` (Object ID) | \\`Amount\\` | \\`EthereumAddress\\` |\n| \\`Int\\` | \\`PHID\\` (Powerhouse ID) | \\`Amount_Tokens\\` | \\`EmailAddress\\` |\n| \\`Float\\` | \\`OLabel\\` | \\`Amount_Money\\` | \\`Date\\` |\n| \\`Boolean\\` | | \\`Amount_Fiat\\` | \\`DateTime\\` |\n| | | \\`Amount_Currency\\` | \\`URL\\` |\n| | | \\`Amount_Crypto\\` | \\`Currency\\` |\n| | | \\`Amount_Percentage\\` | |\n\n### Arrays and Objects\n\n- **Arrays**: Must be mandatory \\`[ObjectType!]!\\`\n- **Objects in arrays**: Must include \\`OID!\\` field for unique identification\n- Include \\`OLabel\\` for metadata when relevant\n\n#### OID vs PHID Usage\n\n- \\`OID\\` is used as **primary key** (\\`id: OID!\\`) and **foreign key** (\\`otherObjectId: OID!\\`) within a document\n- \\`PHID\\` is **only** for referencing **external documents** (other documents in the drive), typically alongside cached properties (like a link preview — title/snippet may become stale)\n- **NEVER** use the \\`ID\\` type — it is a common GraphQL convention but is not used in Powerhouse document models\n\n#### Collection Sorting & Trees\n\n- **No need for \\`position\\` or \\`weight\\` properties** — maintain order via array index; operations like \\`MOVE_X\\` reorder the array directly\n- **Trees**: Always define as a flat list with \\`parentId: OID\\` (root nodes have \\`parentId = null\\`); do NOT use recursive/nested types\n\n### Input Types\n\n- Reflect user intent with descriptive names\n- Simple, specific fields over complex nested types\n- System auto-generates \\`OID\\` for new objects (users don't provide manually)\n\n#### Input Type Naming Convention\n\n- Root input type **MUST** be named \\`<OperationName>Input\\` 
(PascalCase of the operation name)\n- Example: operation \\`SET_CATEGORY_LABEL\\` → input type \\`SetCategoryLabelInput\\`\n- **Failing to follow this convention breaks the code generator**\n\n#### Input Types Cannot Reference State Types\n\n- In operation input schemas, **ONLY** \\`enum\\` types and scalar types from the state schema can be referenced directly\n- All other state types must be **mirrored** with unique input types (e.g., state type \\`MenuItem\\` → input type \\`NewMenuItemInput\\` for the ADD operation)\n- State \\`enum\\` types **MUST NOT** be redefined in input schemas — reference them directly\n- Each operation should have its **own** input types; do not share mirror types across operations\n\n#### Empty Input Workaround\n\n- Input types with **zero fields** are not supported by the code generator\n- Workaround: add \\`_: Boolean\\` as a dummy optional parameter\n\n~~~graphql\n# ❌ BAD - empty input type breaks codegen\ninput ClearAllInput {}\n\n# ✅ GOOD - dummy field workaround\ninput ClearAllInput {\n _: Boolean\n}\n~~~\n\n## Working with Drives\n\n**MANDATORY**: Check the document-drive schema before performing drive operations.\n\n### Drive Types\n\nThere might be two drives available with a special use case:\n\n1. **Vetra Drive** (\\`vetra-{hash}\\`):\n - Contains **source documents**: document models and document editors\n - Used for development\n - Add document model and editor definitions here\n\n2. **Preview Drive** (\\`preview-{hash}\\`, named \"Vetra Preview\"):\n - Contains **demo and preview documents** (document instances)\n - Used for showcasing and testing document models\n - Add actual document instances here\n\n### Drive Operations\n\nWhen working with drives (adding/removing documents, creating folders, etc.):\n\n1. **Always get the drive schema first**:\n\n ~~~typescript\n mcp__reactor -\n mcp__getDocumentModelSchema({ type: \"powerhouse/document-drive\" });\n ~~~\n\n2. 
**Review available operations** in the schema, such as:\n - \\`ADD_FILE\\` - Add a document to the drive\n - \\`ADD_FOLDER\\` - Create a new folder\n - \\`DELETE_NODE\\` - Remove a file or folder (use this, NOT \"DELETE_FILE\")\n - \\`UPDATE_NODE\\` - Update node properties\n - \\`MOVE_NODE\\` - Move a node to different location\n\n3. **Check input schemas** for each operation to ensure you're passing correct parameters\n`.raw;\n","import { json } from \"@tmpl/core\";\n\nexport const claudeSettingsLocalTemplate = json`\n{\n \"$schema\": \"https://json.schemastore.org/claude-code-settings.json\",\n \"permissions\": {\n \"allow\": [\"Bash(npm run tsc:*)\", \"Bash(npm run lint:*)\"],\n \"deny\": [\n \"Write(./document-models/*/gen/**)\",\n \"Write(./.ph/**)\",\n \"Edit(./document-models/*/gen/**)\",\n \"Edit(./.ph/**)\"\n ]\n },\n \"enableAllProjectMcpServers\": true,\n \"enabledMcpjsonServers\": [\"reactor-mcp\"]\n}\n`.raw;\n","import { json } from \"@tmpl/core\";\n\nexport const cursorMcpTemplate = json`\n{\n \"mcpServers\": {\n \"reactor-mcp\": {\n \"type\": \"http\",\n \"url\": \"http://localhost:4001/mcp\"\n }\n }\n}\n`.raw;\n","export const connectEntrypointTemplate = `#!/bin/sh\nset -e\n\n# Substitute environment variables in nginx configuration\nenvsubst '\\${PORT},\\${PH_CONNECT_BASE_PATH}' < /etc/nginx/nginx.conf.template > /etc/nginx/nginx.conf\n\necho \"Testing nginx configuration...\"\nnginx -t\n\nif [ $? 
-eq 0 ]; then\n echo \"Connect available at: http://localhost:\\${PORT}\\${PH_CONNECT_BASE_PATH}\"\n exec nginx -g \"daemon off;\"\nelse\n echo \"Nginx configuration test failed\"\n exit 1\nfi\n`;\n","export const dockerfileTemplate = `# =============================================================================\n# Multi-stage Dockerfile for Powerhouse Document Model Packages\n# Produces two images: connect (frontend) and switchboard (backend)\n#\n# Build commands:\n# docker build --target connect -t <registry>/<project>/connect:<tag> .\n# docker build --target switchboard -t <registry>/<project>/switchboard:<tag> .\n# =============================================================================\n\n# -----------------------------------------------------------------------------\n# Base stage: Common setup for building\n# -----------------------------------------------------------------------------\nFROM node:24-alpine AS base\n\nWORKDIR /app\n\n# Install build dependencies\nRUN apk add --no-cache python3 make g++ git bash \\\\\n && ln -sf /usr/bin/python3 /usr/bin/python\n\n# Setup pnpm\nENV PNPM_HOME=\"/pnpm\"\nENV PATH=\"$PNPM_HOME:$PATH\"\nRUN corepack enable && corepack prepare pnpm@latest --activate\n\n# Configure JSR registry\nRUN pnpm config set @jsr:registry https://npm.jsr.io\n\n# Build arguments\nARG TAG=latest\nARG PH_CONNECT_BASE_PATH=\"/\"\n\n# Install ph-cmd, prisma, and prettier globally\nRUN pnpm add -g ph-cmd@$TAG prisma@5.17.0 prettier\n\n# Initialize project based on tag (dev/staging/latest)\nRUN case \"$TAG\" in \\\\\n *dev*) ph init project --dev --package-manager pnpm ;; \\\\\n *staging*) ph init project --staging --package-manager pnpm ;; \\\\\n *) ph init project --package-manager pnpm ;; \\\\\n esac\n\nWORKDIR /app/project\n\n# Copy package files for the current package\nCOPY package.json pnpm-lock.yaml ./\n\n# Install the current package (this package)\nARG PACKAGE_NAME\nRUN if [ -n \"$PACKAGE_NAME\" ]; then \\\\\n echo \"Installing 
package: $PACKAGE_NAME\"; \\\\\n ph install \"$PACKAGE_NAME\"; \\\\\n else \\\\\n echo \"Warning: PACKAGE_NAME not provided, using local build\"; \\\\\n pnpm install; \\\\\n fi\n\n# Regenerate Prisma client for Alpine Linux\nRUN prisma generate --schema node_modules/document-drive/dist/prisma/schema.prisma || true\n\n# -----------------------------------------------------------------------------\n# Connect build stage\n# -----------------------------------------------------------------------------\nFROM base AS connect-builder\n\nARG PH_CONNECT_BASE_PATH=\"/\"\n\n# Build connect\nRUN ph connect build --base \\${PH_CONNECT_BASE_PATH}\n\n# -----------------------------------------------------------------------------\n# Connect final stage - nginx\n# -----------------------------------------------------------------------------\nFROM nginx:alpine AS connect\n\n# Install envsubst for config templating\nRUN apk add --no-cache gettext\n\n# Copy nginx config template\nCOPY docker/nginx.conf /etc/nginx/nginx.conf.template\n\n# Copy built static files from build stage\nCOPY --from=connect-builder /app/project/.ph/connect-build/dist /var/www/html/project\n\n# Environment variables for nginx config\nENV PORT=3001\nENV PH_CONNECT_BASE_PATH=\"/\"\n\n# Copy and setup entrypoint\nCOPY docker/connect-entrypoint.sh /docker-entrypoint.sh\nRUN chmod +x /docker-entrypoint.sh\n\nEXPOSE \\${PORT}\n\nHEALTHCHECK --interval=30s --timeout=3s --start-period=10s --retries=3 \\\\\n CMD wget -q --spider http://localhost:\\${PORT}/health || exit 1\n\nENTRYPOINT [\"/docker-entrypoint.sh\"]\n\n# -----------------------------------------------------------------------------\n# Switchboard final stage - node runtime\n# -----------------------------------------------------------------------------\nFROM node:24-alpine AS switchboard\n\nWORKDIR /app\n\n# Install runtime dependencies\nRUN apk add --no-cache curl openssl\n\n# Setup pnpm\nENV PNPM_HOME=\"/pnpm\"\nENV PATH=\"$PNPM_HOME:$PATH\"\nRUN corepack 
enable && corepack prepare pnpm@latest --activate\n\n# Configure JSR registry\nRUN pnpm config set @jsr:registry https://npm.jsr.io\n\n# Install ph-cmd and prisma globally (needed at runtime)\nARG TAG=latest\nRUN pnpm add -g ph-cmd@$TAG prisma@5.17.0\n\n# Copy built project from build stage\nCOPY --from=base /app/project /app/project\n\nWORKDIR /app/project\n\n# Copy entrypoint\nCOPY docker/switchboard-entrypoint.sh /app/entrypoint.sh\nRUN chmod +x /app/entrypoint.sh\n\n# Environment variables\nENV NODE_ENV=production\nENV PORT=3000\nENV DATABASE_URL=\"\"\nENV SKIP_DB_MIGRATIONS=\"false\"\n\nEXPOSE \\${PORT}\n\nHEALTHCHECK --interval=30s --timeout=3s --start-period=30s --retries=3 \\\\\n CMD curl -f http://localhost:\\${PORT}/health || exit 1\n\nENTRYPOINT [\"/app/entrypoint.sh\"]\n`;\n","export const nginxConfTemplate = `user nginx;\nworker_processes auto;\nerror_log /var/log/nginx/error.log warn;\npid /var/run/nginx.pid;\n\nevents {\n worker_connections 1024;\n}\n\nhttp {\n include /etc/nginx/mime.types;\n default_type application/octet-stream;\n\n # Gzip compression\n gzip on;\n gzip_vary on;\n gzip_proxied any;\n gzip_comp_level 6;\n gzip_buffers 16 8k;\n gzip_http_version 1.1;\n gzip_types text/plain text/css application/javascript application/json image/svg+xml application/xml+rss image/avif;\n\n server {\n listen 0.0.0.0:\\${PORT};\n server_name _;\n root /var/www/html/project;\n\n # Health check endpoint\n location /health {\n access_log off;\n add_header Content-Type text/plain;\n return 200 'OK';\n }\n\n location \\${PH_CONNECT_BASE_PATH}/assets/ {\n alias /var/www/html/project/assets/;\n access_log off;\n log_not_found off;\n etag off;\n expires max;\n add_header Cache-Control \"public, max-age=31536000, immutable\";\n }\n\n location \\${PH_CONNECT_BASE_PATH}/fonts/ {\n alias /var/www/html/project/fonts/;\n access_log off;\n log_not_found off;\n expires max;\n add_header Cache-Control \"public, max-age=31536000, immutable\";\n }\n\n location = 
\\${PH_CONNECT_BASE_PATH}/service-worker.js {\n alias /var/www/html/project/service-worker.js;\n\n access_log off;\n log_not_found off;\n etag off;\n\n add_header Cache-Control \"no-cache, no-store, must-revalidate\";\n add_header Pragma \"no-cache\";\n add_header Expires \"0\";\n }\n\n # match any file in the root folder, except index.html\n location ~ \\${PH_CONNECT_BASE_PATH}/(?!index\\\\.html$)([^/]+\\\\.[a-zA-Z0-9]+)$ {\n alias /var/www/html/project/$1;\n access_log off;\n log_not_found off;\n etag on;\n add_header Cache-Control \"public, must-revalidate\";\n }\n\n location \\${PH_CONNECT_BASE_PATH} {\n try_files $uri $uri/ /index.html;\n }\n }\n}\n`;\n","export const switchboardEntrypointTemplate = `#!/bin/sh\nset -e\n\n# Regenerate Prisma client for current platform (fixes darwin-arm64 vs linux-musl-openssl mismatch)\necho \"[entrypoint] Regenerating Prisma client for current platform...\"\nprisma generate --schema node_modules/document-drive/dist/prisma/schema.prisma\n\n# Run migrations if DATABASE_URL is postgres and migrations not skipped\nif [ -n \"$DATABASE_URL\" ] && echo \"$DATABASE_URL\" | grep -q \"^postgres\" && [ \"$SKIP_DB_MIGRATIONS\" != \"true\" ]; then\n echo \"[entrypoint] Running Prisma db push...\"\n prisma db push --schema node_modules/document-drive/dist/prisma/schema.prisma --skip-generate\n echo \"[entrypoint] Running migrations...\"\n ph switchboard --migrate\nfi\n\necho \"[entrypoint] Starting switchboard on port \\${PORT:-3000}...\"\nexec ph switchboard --port \\${PORT:-3000}\n`;\n","import { ts } from \"@tmpl/core\";\n\nexport const documentModelsTemplate = ts`\n/**\n * WARNING: DO NOT EDIT\n * This file is auto-generated and updated by codegen\n */\nimport type { DocumentModelModule } from \"document-model\";\n\nexport const documentModels: DocumentModelModule<any>[] = [];\n`.raw;\n","export const documentModelsIndexTemplate = `\n/**\n * WARNING: DO NOT EDIT\n * This file is auto-generated and updated by codegen\n */\n`;\n","import 
{ ts } from \"@tmpl/core\";\n\nexport const upgradeManifestsTemplate = ts`\n/**\n * WARNING: DO NOT EDIT\n * This file is auto-generated and updated by codegen\n */\nimport type { UpgradeManifest } from \"document-model\";\n\nexport const upgradeManifests: UpgradeManifest<readonly number[]>[] = [];\n`.raw;\n","import { ts } from \"@tmpl/core\";\n\nexport const editorsTemplate = ts`\n/**\n * WARNING: DO NOT EDIT\n * This file is auto-generated and updated by codegen\n */\nimport type { EditorModule } from \"document-model\";\n\nexport const editors: EditorModule[] = [];\n`.raw;\n","export const editorsIndexTemplate = `\n/**\n * WARNING: DO NOT EDIT\n * This file is auto-generated and updated by codegen\n */\n`;\n","import { js } from \"@tmpl/core\";\n\nexport const eslintConfigTemplate = js`\n// @ts-check\nimport { default as eslint } from \"@eslint/js\";\nimport eslintPluginPrettierRecommended from \"eslint-plugin-prettier/recommended\";\nimport reactPlugin from \"eslint-plugin-react\";\nimport reactHooksPlugin from \"eslint-plugin-react-hooks\";\nimport { defineConfig, globalIgnores } from \"eslint/config\";\nimport globals from \"globals\";\nimport tseslint from \"typescript-eslint\";\n\n/** These files are typically ignored by eslint by default, so there is no need to investigate why they are ignored. 
*/\nconst ignoredFiles = [\n \"**/node_modules/\",\n \"**/dist/\",\n \"**/.ph/\",\n \"**/storybook-static/\",\n \"**/.vite/\",\n];\n\n/** Global configs for eslint ignores */\nconst ignored = globalIgnores(ignoredFiles);\n\n/** Typescript (\\`.ts\\`) files */\nconst typescriptFiles = [\"**/*.ts\"];\n\n/** Typescript React (\\`.tsx\\`) files */\nconst typescriptReactFiles = [\"**/*.tsx\"];\n\n/** Javascript (\\`.js\\`, \\`.cjs\\`, \\`.mjs\\`) files */\nconst javascriptFiles = [\"**/*.js\", \"**/*.cjs\", \"**/*.mjs\"];\n\n/** Typescript rules that we have chosen to opt out of in general */\n/** @type {import(\"eslint\").Linter.RulesRecord} */\nconst typescriptRules = {\n \"@typescript-eslint/consistent-type-imports\": [\n \"error\",\n {\n prefer: \"type-imports\",\n disallowTypeAnnotations: true,\n fixStyle: \"separate-type-imports\",\n },\n ],\n \"@typescript-eslint/no-explicit-any\": \"off\",\n \"@typescript-eslint/no-unused-vars\": [\n \"warn\",\n {\n argsIgnorePattern: \"^_\",\n varsIgnorePattern: \"^_\",\n caughtErrorsIgnorePattern: \"^_\",\n },\n ],\n \"@typescript-eslint/no-unnecessary-condition\": \"warn\",\n \"@typescript-eslint/require-await\": \"warn\",\n \"@typescript-eslint/no-misused-promises\": \"warn\",\n \"@typescript-eslint/no-floating-promises\": \"warn\",\n \"@typescript-eslint/no-empty-object-type\": \"warn\",\n \"@typescript-eslint/no-duplicate-type-constituents\": \"warn\",\n \"@typescript-eslint/restrict-template-expressions\": [\n \"warn\",\n {\n allowNumber: true,\n },\n ],\n};\n\n/** Language options for typescript files \n@type {import(\"eslint\").Linter.LanguageOptions} */\nconst typescriptLanguageOptions = {\n sourceType: \"module\",\n ecmaVersion: \"latest\",\n globals: {\n ...globals.browser,\n ...globals.node,\n },\n parserOptions: {\n projectService: {\n allowDefaultProject: [\"eslint.config.js\", \"vitest.config.ts\"],\n },\n tsconfigRootDir: import.meta.dirname,\n ecmaFeatures: {\n jsx: true,\n },\n },\n};\n\n/** React plugins 
*/\nconst reactPlugins = {\n react: reactPlugin,\n \"react-hooks\": reactHooksPlugin,\n};\n\n/** React settings */\nconst reactSettings = {\n react: {\n version: \"detect\",\n },\n};\n\n/** Typescript config for both \\`.ts\\` and \\`.tsx\\` files */\nconst typescriptConfig = {\n files: [...typescriptFiles, ...typescriptReactFiles],\n languageOptions: typescriptLanguageOptions,\n rules: typescriptRules,\n};\n\n/** React config for \\`.tsx\\` files */\nconst reactConfig = {\n files: typescriptReactFiles,\n settings: reactSettings,\n plugins: reactPlugins,\n};\n\n/** Config for javascript files */\nconst javascriptConfig = {\n // disable type aware linting for js files\n files: javascriptFiles,\n extends: [tseslint.configs.disableTypeChecked],\n};\n\n/** Recommended config from eslint */\nconst eslintRecommendedConfig = eslint.configs.recommended;\n\n/** Recommended config from typescript-eslint */\nconst typescriptEsLintRecommendedConfig = [\n ...tseslint.configs.recommendedTypeChecked,\n];\n\n/** Main config */\nexport default defineConfig(\n ignored,\n eslintRecommendedConfig,\n typescriptEsLintRecommendedConfig,\n typescriptConfig,\n reactConfig,\n javascriptConfig,\n eslintPluginPrettierRecommended,\n);\n`.raw;\n","import { json } from \"@tmpl/core\";\n\nexport const geminiSettingsTemplate = json`\n{\n \"mcpServers\": {\n \"reactor-mcp\": {\n \"type\": \"http\",\n \"url\": \"http://localhost:4001/mcp\"\n }\n }\n}\n`.raw;\n","import { yaml } from \"@tmpl/core\";\n\nexport const syncAndPublishWorkflowTemplate = yaml`\nname: Sync and Publish\n\non:\n # Triggered by powerhouse monorepo after release\n repository_dispatch:\n types: [powerhouse-release]\n\n # Manual trigger\n workflow_dispatch:\n inputs:\n channel:\n description: 'Release channel'\n required: true\n type: choice\n options:\n - dev\n - staging\n - latest\n default: 'staging'\n version:\n description: 'Powerhouse version (e.g., 5.3.0-staging.6)'\n required: false\n type: string\n dry-run:\n description: 
'Dry run (skip publishing)'\n required: false\n type: boolean\n default: false\n skip-docker:\n description: 'Skip Docker build and push'\n required: false\n type: boolean\n default: false\n\nenv:\n NODE_VERSION: '24'\n PNPM_VERSION: '10'\n DOCKER_REGISTRY: cr.vetra.io\n GHCR_REGISTRY: ghcr.io\n\njobs:\n # ==========================================================================\n # Determine release parameters\n # ==========================================================================\n prepare:\n name: Prepare Release\n runs-on: ubuntu-latest\n outputs:\n channel: \\${{ steps.params.outputs.channel }}\n version: \\${{ steps.params.outputs.version }}\n branch: \\${{ steps.params.outputs.branch }}\n project_name: \\${{ steps.params.outputs.project_name }}\n dry_run: \\${{ steps.params.outputs.dry_run }}\n skip_docker: \\${{ steps.params.outputs.skip_docker }}\n steps:\n - name: Determine parameters\n id: params\n run: |\n # Get channel from dispatch payload or input\n if [ \"\\${{ github.event_name }}\" = \"repository_dispatch\" ]; then\n CHANNEL=\"\\${{ github.event.client_payload.channel }}\"\n VERSION=\"\\${{ github.event.client_payload.version }}\"\n DRY_RUN=\"false\"\n SKIP_DOCKER=\"false\"\n else\n CHANNEL=\"\\${{ inputs.channel }}\"\n VERSION=\"\\${{ inputs.version }}\"\n DRY_RUN=\"\\${{ inputs.dry-run }}\"\n SKIP_DOCKER=\"\\${{ inputs.skip-docker }}\"\n fi\n\n # Default channel to staging if not set\n CHANNEL=\"\\${CHANNEL:-staging}\"\n\n # Determine branch from channel\n case \"\\$CHANNEL\" in\n dev) BRANCH=\"dev\" ;;\n staging) BRANCH=\"staging\" ;;\n latest|main) BRANCH=\"main\" ;;\n *) BRANCH=\"staging\" ;;\n esac\n\n # Use DOCKER_PROJECT secret if set, otherwise extract from repository name\n if [ -n \"\\${{ secrets.DOCKER_PROJECT }}\" ]; then\n PROJECT_NAME=\"\\${{ secrets.DOCKER_PROJECT }}\"\n else\n PROJECT_NAME=\"\\${GITHUB_REPOSITORY#*/}\"\n fi\n\n echo \"channel=\\$CHANNEL\" >> \\$GITHUB_OUTPUT\n echo \"version=\\$VERSION\" >> 
\\$GITHUB_OUTPUT\n echo \"branch=\\$BRANCH\" >> \\$GITHUB_OUTPUT\n echo \"project_name=\\$PROJECT_NAME\" >> \\$GITHUB_OUTPUT\n echo \"dry_run=\\$DRY_RUN\" >> \\$GITHUB_OUTPUT\n echo \"skip_docker=\\$SKIP_DOCKER\" >> \\$GITHUB_OUTPUT\n\n echo \"Channel: \\$CHANNEL\"\n echo \"Version: \\$VERSION\"\n echo \"Branch: \\$BRANCH\"\n echo \"Project: \\$PROJECT_NAME\"\n echo \"Dry Run: \\$DRY_RUN\"\n echo \"Skip Docker: \\$SKIP_DOCKER\"\n\n # ==========================================================================\n # Update dependencies and publish to npm\n # ==========================================================================\n update-and-publish:\n name: Update & Publish NPM\n needs: prepare\n runs-on: ubuntu-latest\n permissions:\n contents: write\n id-token: write\n outputs:\n new_version: \\${{ steps.version.outputs.new_version }}\n steps:\n - name: Checkout repository\n uses: actions/checkout@v4\n with:\n ref: \\${{ needs.prepare.outputs.branch }}\n fetch-depth: 0\n token: \\${{ secrets.GITHUB_TOKEN }}\n\n - name: Install pnpm\n uses: pnpm/action-setup@v4\n with:\n version: \\${{ env.PNPM_VERSION }}\n\n - name: Install Node.js\n uses: actions/setup-node@v4\n with:\n node-version: \\${{ env.NODE_VERSION }}\n cache: 'pnpm'\n\n - name: Configure git\n run: |\n git config user.name \"github-actions[bot]\"\n git config user.email \"github-actions[bot]@users.noreply.github.com\"\n\n - name: Install ph-cli\n run: |\n CHANNEL=\"\\${{ needs.prepare.outputs.channel }}\"\n case \"\\$CHANNEL\" in\n dev) pnpm add -g @powerhousedao/ph-cli@dev ;;\n staging) pnpm add -g @powerhousedao/ph-cli@staging ;;\n *) pnpm add -g @powerhousedao/ph-cli@latest ;;\n esac\n\n - name: Update Powerhouse dependencies\n run: ph update\n\n - name: Install dependencies\n run: pnpm install\n\n - name: Build package\n run: pnpm build\n\n - name: Run tests\n run: pnpm test || true\n continue-on-error: true\n\n - name: Bump version\n id: version\n run: |\n CHANNEL=\"\\${{ 
needs.prepare.outputs.channel }}\"\n CURRENT_VERSION=\\$(node -p \"require('./package.json').version\")\n\n # Determine new version\n if [ \"\\$CHANNEL\" = \"latest\" ] || [ \"\\$CHANNEL\" = \"main\" ]; then\n # For production, use patch bump\n npm version patch --no-git-tag-version\n else\n # For dev/staging, use prerelease\n npm version prerelease --preid=\\$CHANNEL --no-git-tag-version\n fi\n\n NEW_VERSION=\\$(node -p \"require('./package.json').version\")\n echo \"new_version=\\$NEW_VERSION\" >> \\$GITHUB_OUTPUT\n echo \"Bumped version: \\$CURRENT_VERSION -> \\$NEW_VERSION\"\n\n - name: Commit changes\n run: |\n git add package.json pnpm-lock.yaml\n git commit -m \"chore: sync powerhouse dependencies to \\${{ needs.prepare.outputs.version }}\n\n - Updated to powerhouse \\${{ needs.prepare.outputs.version }}\n - Bumped version to \\${{ steps.version.outputs.new_version }}\" || echo \"No changes to commit\"\n\n - name: Push changes\n if: needs.prepare.outputs.dry_run != 'true'\n run: git push\n\n - name: Setup npm for publishing\n if: needs.prepare.outputs.dry_run != 'true' && secrets.NPM_ACCESS_TOKEN != ''\n uses: actions/setup-node@v4\n with:\n node-version: \\${{ env.NODE_VERSION }}\n registry-url: 'https://registry.npmjs.org'\n\n - name: Publish to npm with provenance\n if: needs.prepare.outputs.dry_run != 'true' && secrets.NPM_ACCESS_TOKEN != ''\n env:\n NODE_AUTH_TOKEN: \\${{ secrets.NPM_ACCESS_TOKEN }}\n NPM_CONFIG_PROVENANCE: true\n run: |\n CHANNEL=\"\\${{ needs.prepare.outputs.channel }}\"\n if [ \"\\$CHANNEL\" = \"latest\" ] || [ \"\\$CHANNEL\" = \"main\" ]; then\n pnpm publish --access public --tag latest --no-git-checks\n else\n pnpm publish --access public --tag \\$CHANNEL --no-git-checks\n fi\n\n - name: Create git tag\n if: needs.prepare.outputs.dry_run != 'true'\n run: |\n git tag \"v\\${{ steps.version.outputs.new_version }}\"\n git push origin \"v\\${{ steps.version.outputs.new_version }}\"\n\n # 
==========================================================================\n # Build and push Docker images\n # ==========================================================================\n build-docker:\n name: Build Docker Images\n needs: [prepare, update-and-publish]\n if: |\n needs.prepare.outputs.skip_docker != 'true' &&\n needs.prepare.outputs.dry_run != 'true' &&\n secrets.DOCKER_USERNAME != '' &&\n secrets.DOCKER_PASSWORD != ''\n runs-on: ubuntu-latest\n permissions:\n contents: read\n packages: write\n strategy:\n matrix:\n target: [connect, switchboard]\n steps:\n - name: Checkout repository\n uses: actions/checkout@v4\n with:\n ref: \\${{ needs.prepare.outputs.branch }}\n\n - name: Pull latest changes\n run: git pull origin \\${{ needs.prepare.outputs.branch }}\n\n - name: Set up Docker Buildx\n uses: docker/setup-buildx-action@v3\n\n - name: Ensure Docker project exists\n run: |\n PROJECT_NAME=\"\\${{ needs.prepare.outputs.project_name }}\"\n\n # Check if project exists, create if not\n STATUS=\\$(curl -s -o /dev/null -w \"%{http_code}\" \\\\\n -u \"\\${{ secrets.DOCKER_USERNAME }}:\\${{ secrets.DOCKER_PASSWORD }}\" \\\\\n \"https://\\${{ env.DOCKER_REGISTRY }}/api/v2.0/projects?name=\\${PROJECT_NAME}\")\n\n if [ \"\\$STATUS\" = \"200\" ]; then\n # Check if the project is in the response\n EXISTS=\\$(curl -s \\\\\n -u \"\\${{ secrets.DOCKER_USERNAME }}:\\${{ secrets.DOCKER_PASSWORD }}\" \\\\\n \"https://\\${{ env.DOCKER_REGISTRY }}/api/v2.0/projects?name=\\${PROJECT_NAME}\" | \\\\\n jq -r \".[] | select(.name==\\\\\"\\${PROJECT_NAME}\\\\\") | .name\")\n\n if [ \"\\$EXISTS\" = \"\\$PROJECT_NAME\" ]; then\n echo \"Project \\${PROJECT_NAME} already exists\"\n else\n echo \"Creating project \\${PROJECT_NAME}...\"\n curl -X POST \\\\\n -u \"\\${{ secrets.DOCKER_USERNAME }}:\\${{ secrets.DOCKER_PASSWORD }}\" \\\\\n -H \"Content-Type: application/json\" \\\\\n -d \"{\\\\\"project_name\\\\\": \\\\\"\\${PROJECT_NAME}\\\\\", \\\\\"public\\\\\": false}\" \\\\\n 
\"https://\\${{ env.DOCKER_REGISTRY }}/api/v2.0/projects\"\n fi\n else\n echo \"Creating project \\${PROJECT_NAME}...\"\n curl -X POST \\\\\n -u \"\\${{ secrets.DOCKER_USERNAME }}:\\${{ secrets.DOCKER_PASSWORD }}\" \\\\\n -H \"Content-Type: application/json\" \\\\\n -d \"{\\\\\"project_name\\\\\": \\\\\"\\${PROJECT_NAME}\\\\\", \\\\\"public\\\\\": false}\" \\\\\n \"https://\\${{ env.DOCKER_REGISTRY }}/api/v2.0/projects\"\n fi\n\n - name: Login to GitHub Container Registry\n uses: docker/login-action@v3\n with:\n registry: \\${{ env.GHCR_REGISTRY }}\n username: \\${{ github.actor }}\n password: \\${{ secrets.GITHUB_TOKEN }}\n\n - name: Login to Docker Registry\n uses: docker/login-action@v3\n with:\n registry: \\${{ env.DOCKER_REGISTRY }}\n username: \\${{ secrets.DOCKER_USERNAME }}\n password: \\${{ secrets.DOCKER_PASSWORD }}\n\n - name: Extract package name\n id: package\n run: |\n PACKAGE_NAME=\\$(node -p \"require('./package.json').name\")\n echo \"name=\\$PACKAGE_NAME\" >> \\$GITHUB_OUTPUT\n\n - name: Determine image tags\n id: tags\n run: |\n VERSION=\"\\${{ needs.update-and-publish.outputs.new_version }}\"\n CHANNEL=\"\\${{ needs.prepare.outputs.channel }}\"\n PROJECT=\"\\${{ needs.prepare.outputs.project_name }}\"\n TARGET=\"\\${{ matrix.target }}\"\n\n # GHCR tags\n GHCR_BASE=\"\\${{ env.GHCR_REGISTRY }}/\\${{ github.repository_owner }}/\\${PROJECT}/\\${TARGET}\"\n\n # Docker registry tags\n DOCKER_BASE=\"\\${{ env.DOCKER_REGISTRY }}/\\${PROJECT}/\\${TARGET}\"\n\n # Build tag list\n TAGS=\"\\${GHCR_BASE}:v\\${VERSION}\"\n TAGS=\"\\${TAGS},\\${DOCKER_BASE}:v\\${VERSION}\"\n\n # Add channel tag\n if [ \"\\$CHANNEL\" = \"latest\" ] || [ \"\\$CHANNEL\" = \"main\" ]; then\n TAGS=\"\\${TAGS},\\${GHCR_BASE}:latest\"\n TAGS=\"\\${TAGS},\\${DOCKER_BASE}:latest\"\n else\n TAGS=\"\\${TAGS},\\${GHCR_BASE}:\\${CHANNEL}\"\n TAGS=\"\\${TAGS},\\${DOCKER_BASE}:\\${CHANNEL}\"\n fi\n\n echo \"tags=\\$TAGS\" >> \\$GITHUB_OUTPUT\n echo \"Image tags: \\$TAGS\"\n\n - name: Build 
and push \\${{ matrix.target }}\n uses: docker/build-push-action@v5\n with:\n context: .\n file: ./Dockerfile\n target: \\${{ matrix.target }}\n push: true\n tags: \\${{ steps.tags.outputs.tags }}\n build-args: |\n TAG=\\${{ needs.prepare.outputs.channel == 'latest' && 'latest' || needs.prepare.outputs.version }}\n PACKAGE_NAME=\\${{ steps.package.outputs.name }}\n PH_CONNECT_BASE_PATH=/\n cache-from: type=gha\n cache-to: type=gha,mode=max\n\n # ==========================================================================\n # Summary\n # ==========================================================================\n summary:\n name: Release Summary\n needs: [prepare, update-and-publish, build-docker]\n if: always()\n runs-on: ubuntu-latest\n steps:\n - name: Summary\n run: |\n echo \"## Release Summary\" >> \\$GITHUB_STEP_SUMMARY\n echo \"\" >> \\$GITHUB_STEP_SUMMARY\n echo \"| Parameter | Value |\" >> \\$GITHUB_STEP_SUMMARY\n echo \"|-----------|-------|\" >> \\$GITHUB_STEP_SUMMARY\n echo \"| Channel | \\${{ needs.prepare.outputs.channel }} |\" >> \\$GITHUB_STEP_SUMMARY\n echo \"| Branch | \\${{ needs.prepare.outputs.branch }} |\" >> \\$GITHUB_STEP_SUMMARY\n echo \"| Powerhouse Version | \\${{ needs.prepare.outputs.version }} |\" >> \\$GITHUB_STEP_SUMMARY\n echo \"| Package Version | \\${{ needs.update-and-publish.outputs.new_version }} |\" >> \\$GITHUB_STEP_SUMMARY\n echo \"| Dry Run | \\${{ needs.prepare.outputs.dry_run }} |\" >> \\$GITHUB_STEP_SUMMARY\n echo \"\" >> \\$GITHUB_STEP_SUMMARY\n echo \"### Docker Images\" >> \\$GITHUB_STEP_SUMMARY\n echo \"- \\\\\\`\\${{ env.DOCKER_REGISTRY }}/\\${{ needs.prepare.outputs.project_name }}/connect:v\\${{ needs.update-and-publish.outputs.new_version }}\\\\\\`\" >> \\$GITHUB_STEP_SUMMARY\n echo \"- \\\\\\`\\${{ env.DOCKER_REGISTRY }}/\\${{ needs.prepare.outputs.project_name }}/switchboard:v\\${{ needs.update-and-publish.outputs.new_version }}\\\\\\`\" >> \\$GITHUB_STEP_SUMMARY\n`.raw;\n","export const gitIgnoreTemplate = 
`\ndist\ncoverage\nnode_modules\n.eslintcache\n.env.local\n\n.ph\nprojects-import.js\n`;\n","import { html } from \"@tmpl/core\";\n\nexport const indexHtmlTemplate = html`<!doctype html>\n <html lang=\"en\">\n <head>\n <meta charset=\"UTF-8\" />\n <meta name=\"viewport\" content=\"width=device-width, initial-scale=1\" />\n <meta\n name=\"description\"\n content=\"Connect is a hub for your most important documents and processes, translated into software. Easily capture data in a structured way with dedicated business process packages. Collaborate on shared documents with ease while using your preferred storage solution (decentralized, centralized, or local).\"\n />\n <title>Powerhouse Connect</title>\n <link rel=\"icon\" href=\"/icon.ico\" />\n <link rel=\"preconnect\" href=\"https://fonts.googleapis.com\" />\n <link rel=\"preconnect\" href=\"https://fonts.gstatic.com\" crossorigin />\n <link\n href=\"https://fonts.googleapis.com/css2?family=Inter:ital,opsz,wght@0,14..32,100..900;1,14..32,100..900&display=swap\"\n rel=\"stylesheet\"\n />\n </head>\n <body>\n <div id=\"root\"></div>\n <link href=\"/style.css\" rel=\"stylesheet\" />\n <script type=\"module\" src=\"/main.tsx\"></script>\n </body>\n </html> `.raw;\n","import { html } from \"@tmpl/core\";\n\nexport const legacyIndexHtmlTemplate = html`\n <!DOCTYPE html>\n <html lang=\"en\">\n <head>\n <meta charset=\"UTF-8\" />\n <meta name=\"viewport\" content=\"width=device-width, initial-scale=1\" />\n <meta\n name=\"description\"\n content=\"Connect is a hub for your most important documents and processes, translated into software. Easily capture data in a structured way with dedicated business process packages. 
Collaborate on shared documents with ease while using your preferred storage solution (decentralized, centralized, or local).\"\n />\n <title>Powerhouse Connect</title>\n <link rel=\"icon\" href=\"/icon.ico\" />\n <link rel=\"preconnect\" href=\"https://fonts.googleapis.com\" />\n <link rel=\"preconnect\" href=\"https://fonts.gstatic.com\" crossorigin />\n <link\n href=\"https://fonts.googleapis.com/css2?family=Inter:ital,opsz,wght@0,14..32,100..900;1,14..32,100..900&display=swap\"\n rel=\"stylesheet\"\n />\n <style>\n @import \"tailwindcss\";\n @import \"@powerhousedao/design-system/style.css\";\n @import \"@powerhousedao/document-engineering/style.css\";\n @source \"./node_modules/@powerhousedao/connect\";\n </style>\n </head>\n <body>\n <div id=\"root\"></div>\n <script type=\"module\">\n // initializes Connect on '<div id=\"root\"></div>'\n import \"@powerhousedao/connect/main.js\";\n </script>\n </body>\n </html>\n`.raw;\n","import { ts } from \"@tmpl/core\";\n\nexport const indexTsTemplate = ts`\n/**\n * WARNING: DO NOT EDIT\n * This file is auto-generated and updated by codegen\n */\nimport type { Manifest } from \"document-model\";\nimport manifestJson from \"./powerhouse.manifest.json\" with { type: \"json\" };\nexport { documentModels } from \"./document-models/document-models.js\";\nexport { upgradeManifests } from \"./document-models/upgrade-manifests.js\";\nexport { editors } from \"./editors/editors.js\";\nexport { processorFactory } from \"./processors/factory.js\";\nexport const manifest: Manifest = manifestJson;\n`.raw;\n","export const licenseTemplate = `\n GNU AFFERO GENERAL PUBLIC LICENSE\n Version 3, 19 November 2007\n\n Copyright (C) 2007 Free Software Foundation, Inc. 
<https://fsf.org/>\n Everyone is permitted to copy and distribute verbatim copies\n of this license document, but changing it is not allowed.\n\n Preamble\n\n The GNU Affero General Public License is a free, copyleft license for\nsoftware and other kinds of works, specifically designed to ensure\ncooperation with the community in the case of network server software.\n\n The licenses for most software and other practical works are designed\nto take away your freedom to share and change the works. By contrast,\nour General Public Licenses are intended to guarantee your freedom to\nshare and change all versions of a program--to make sure it remains free\nsoftware for all its users.\n\n When we speak of free software, we are referring to freedom, not\nprice. Our General Public Licenses are designed to make sure that you\nhave the freedom to distribute copies of free software (and charge for\nthem if you wish), that you receive source code or can get it if you\nwant it, that you can change the software or use pieces of it in new\nfree programs, and that you know you can do these things.\n\n Developers that use our General Public Licenses protect your rights\nwith two steps: (1) assert copyright on the software, and (2) offer\nyou this License which gives you legal permission to copy, distribute\nand/or modify the software.\n\n A secondary benefit of defending all users' freedom is that\nimprovements made in alternate versions of the program, if they\nreceive widespread use, become available for other developers to\nincorporate. Many developers of free software are heartened and\nencouraged by the resulting cooperation. 
However, in the case of\nsoftware used on network servers, this result may fail to come about.\nThe GNU General Public License permits making a modified version and\nletting the public access it on a server without ever releasing its\nsource code to the public.\n\n The GNU Affero General Public License is designed specifically to\nensure that, in such cases, the modified source code becomes available\nto the community. It requires the operator of a network server to\nprovide the source code of the modified version running there to the\nusers of that server. Therefore, public use of a modified version, on\na publicly accessible server, gives the public access to the source\ncode of the modified version.\n\n An older license, called the Affero General Public License and\npublished by Affero, was designed to accomplish similar goals. This is\na different license, not a version of the Affero GPL, but Affero has\nreleased a new version of the Affero GPL which permits relicensing under\nthis license.\n\n The precise terms and conditions for copying, distribution and\nmodification follow.\n\n TERMS AND CONDITIONS\n\n 0. Definitions.\n\n \"This License\" refers to version 3 of the GNU Affero General Public License.\n\n \"Copyright\" also means copyright-like laws that apply to other kinds of\nworks, such as semiconductor masks.\n\n \"The Program\" refers to any copyrightable work licensed under this\nLicense. Each licensee is addressed as \"you\". \"Licensees\" and\n\"recipients\" may be individuals or organizations.\n\n To \"modify\" a work means to copy from or adapt all or part of the work\nin a fashion requiring copyright permission, other than the making of an\nexact copy. 
The resulting work is called a \"modified version\" of the\nearlier work or a work \"based on\" the earlier work.\n\n A \"covered work\" means either the unmodified Program or a work based\non the Program.\n\n To \"propagate\" a work means to do anything with it that, without\npermission, would make you directly or secondarily liable for\ninfringement under applicable copyright law, except executing it on a\ncomputer or modifying a private copy. Propagation includes copying,\ndistribution (with or without modification), making available to the\npublic, and in some countries other activities as well.\n\n To \"convey\" a work means any kind of propagation that enables other\nparties to make or receive copies. Mere interaction with a user through\na computer network, with no transfer of a copy, is not conveying.\n\n An interactive user interface displays \"Appropriate Legal Notices\"\nto the extent that it includes a convenient and prominently visible\nfeature that (1) displays an appropriate copyright notice, and (2)\ntells the user that there is no warranty for the work (except to the\nextent that warranties are provided), that licensees may convey the\nwork under this License, and how to view a copy of this License. If\nthe interface presents a list of user commands or options, such as a\nmenu, a prominent item in the list meets this criterion.\n\n 1. Source Code.\n\n The \"source code\" for a work means the preferred form of the work\nfor making modifications to it. 
\"Object code\" means any non-source\nform of a work.\n\n A \"Standard Interface\" means an interface that either is an official\nstandard defined by a recognized standards body, or, in the case of\ninterfaces specified for a particular programming language, one that\nis widely used among developers working in that language.\n\n The \"System Libraries\" of an executable work include anything, other\nthan the work as a whole, that (a) is included in the normal form of\npackaging a Major Component, but which is not part of that Major\nComponent, and (b) serves only to enable use of the work with that\nMajor Component, or to implement a Standard Interface for which an\nimplementation is available to the public in source code form. A\n\"Major Component\", in this context, means a major essential component\n(kernel, window system, and so on) of the specific operating system\n(if any) on which the executable work runs, or a compiler used to\nproduce the work, or an object code interpreter used to run it.\n\n The \"Corresponding Source\" for a work in object code form means all\nthe source code needed to generate, install, and (for an executable\nwork) run the object code and to modify the work, including scripts to\ncontrol those activities. However, it does not include the work's\nSystem Libraries, or general-purpose tools or generally available free\nprograms which are used unmodified in performing those activities but\nwhich are not part of the work. 
For example, Corresponding Source\nincludes interface definition files associated with source files for\nthe work, and the source code for shared libraries and dynamically\nlinked subprograms that the work is specifically designed to require,\nsuch as by intimate data communication or control flow between those\nsubprograms and other parts of the work.\n\n The Corresponding Source need not include anything that users\ncan regenerate automatically from other parts of the Corresponding\nSource.\n\n The Corresponding Source for a work in source code form is that\nsame work.\n\n 2. Basic Permissions.\n\n All rights granted under this License are granted for the term of\ncopyright on the Program, and are irrevocable provided the stated\nconditions are met. This License explicitly affirms your unlimited\npermission to run the unmodified Program. The output from running a\ncovered work is covered by this License only if the output, given its\ncontent, constitutes a covered work. This License acknowledges your\nrights of fair use or other equivalent, as provided by copyright law.\n\n You may make, run and propagate covered works that you do not\nconvey, without conditions so long as your license otherwise remains\nin force. You may convey covered works to others for the sole purpose\nof having them make modifications exclusively for you, or provide you\nwith facilities for running those works, provided that you comply with\nthe terms of this License in conveying all material for which you do\nnot control copyright. Those thus making or running the covered works\nfor you must do so exclusively on your behalf, under your direction\nand control, on terms that prohibit them from making any copies of\nyour copyrighted material outside their relationship with you.\n\n Conveying under any other circumstances is permitted solely under\nthe conditions stated below. Sublicensing is not allowed; section 10\nmakes it unnecessary.\n\n 3. 
Protecting Users' Legal Rights From Anti-Circumvention Law.\n\n No covered work shall be deemed part of an effective technological\nmeasure under any applicable law fulfilling obligations under article\n11 of the WIPO copyright treaty adopted on 20 December 1996, or\nsimilar laws prohibiting or restricting circumvention of such\nmeasures.\n\n When you convey a covered work, you waive any legal power to forbid\ncircumvention of technological measures to the extent such circumvention\nis effected by exercising rights under this License with respect to\nthe covered work, and you disclaim any intention to limit operation or\nmodification of the work as a means of enforcing, against the work's\nusers, your or third parties' legal rights to forbid circumvention of\ntechnological measures.\n\n 4. Conveying Verbatim Copies.\n\n You may convey verbatim copies of the Program's source code as you\nreceive it, in any medium, provided that you conspicuously and\nappropriately publish on each copy an appropriate copyright notice;\nkeep intact all notices stating that this License and any\nnon-permissive terms added in accord with section 7 apply to the code;\nkeep intact all notices of the absence of any warranty; and give all\nrecipients a copy of this License along with the Program.\n\n You may charge any price or no price for each copy that you convey,\nand you may offer support or warranty protection for a fee.\n\n 5. Conveying Modified Source Versions.\n\n You may convey a work based on the Program, or the modifications to\nproduce it from the Program, in the form of source code under the\nterms of section 4, provided that you also meet all of these conditions:\n\n a) The work must carry prominent notices stating that you modified\n it, and giving a relevant date.\n\n b) The work must carry prominent notices stating that it is\n released under this License and any conditions added under section\n 7. 
This requirement modifies the requirement in section 4 to\n \"keep intact all notices\".\n\n c) You must license the entire work, as a whole, under this\n License to anyone who comes into possession of a copy. This\n License will therefore apply, along with any applicable section 7\n additional terms, to the whole of the work, and all its parts,\n regardless of how they are packaged. This License gives no\n permission to license the work in any other way, but it does not\n invalidate such permission if you have separately received it.\n\n d) If the work has interactive user interfaces, each must display\n Appropriate Legal Notices; however, if the Program has interactive\n interfaces that do not display Appropriate Legal Notices, your\n work need not make them do so.\n\n A compilation of a covered work with other separate and independent\nworks, which are not by their nature extensions of the covered work,\nand which are not combined with it such as to form a larger program,\nin or on a volume of a storage or distribution medium, is called an\n\"aggregate\" if the compilation and its resulting copyright are not\nused to limit the access or legal rights of the compilation's users\nbeyond what the individual works permit. Inclusion of a covered work\nin an aggregate does not cause this License to apply to the other\nparts of the aggregate.\n\n 6. 
Conveying Non-Source Forms.\n\n You may convey a covered work in object code form under the terms\nof sections 4 and 5, provided that you also convey the\nmachine-readable Corresponding Source under the terms of this License,\nin one of these ways:\n\n a) Convey the object code in, or embodied in, a physical product\n (including a physical distribution medium), accompanied by the\n Corresponding Source fixed on a durable physical medium\n customarily used for software interchange.\n\n b) Convey the object code in, or embodied in, a physical product\n (including a physical distribution medium), accompanied by a\n written offer, valid for at least three years and valid for as\n long as you offer spare parts or customer support for that product\n model, to give anyone who possesses the object code either (1) a\n copy of the Corresponding Source for all the software in the\n product that is covered by this License, on a durable physical\n medium customarily used for software interchange, for a price no\n more than your reasonable cost of physically performing this\n conveying of source, or (2) access to copy the\n Corresponding Source from a network server at no charge.\n\n c) Convey individual copies of the object code with a copy of the\n written offer to provide the Corresponding Source. This\n alternative is allowed only occasionally and noncommercially, and\n only if you received the object code with such an offer, in accord\n with subsection 6b.\n\n d) Convey the object code by offering access from a designated\n place (gratis or for a charge), and offer equivalent access to the\n Corresponding Source in the same way through the same place at no\n further charge. You need not require recipients to copy the\n Corresponding Source along with the object code. 
If the place to\n copy the object code is a network server, the Corresponding Source\n may be on a different server (operated by you or a third party)\n that supports equivalent copying facilities, provided you maintain\n clear directions next to the object code saying where to find the\n Corresponding Source. Regardless of what server hosts the\n Corresponding Source, you remain obligated to ensure that it is\n available for as long as needed to satisfy these requirements.\n\n e) Convey the object code using peer-to-peer transmission, provided\n you inform other peers where the object code and Corresponding\n Source of the work are being offered to the general public at no\n charge under subsection 6d.\n\n A separable portion of the object code, whose source code is excluded\nfrom the Corresponding Source as a System Library, need not be\nincluded in conveying the object code work.\n\n A \"User Product\" is either (1) a \"consumer product\", which means any\ntangible personal property which is normally used for personal, family,\nor household purposes, or (2) anything designed or sold for incorporation\ninto a dwelling. In determining whether a product is a consumer product,\ndoubtful cases shall be resolved in favor of coverage. For a particular\nproduct received by a particular user, \"normally used\" refers to a\ntypical or common use of that class of product, regardless of the status\nof the particular user or of the way in which the particular user\nactually uses, or expects or is expected to use, the product. 
A product\nis a consumer product regardless of whether the product has substantial\ncommercial, industrial or non-consumer uses, unless such uses represent\nthe only significant mode of use of the product.\n\n \"Installation Information\" for a User Product means any methods,\nprocedures, authorization keys, or other information required to install\nand execute modified versions of a covered work in that User Product from\na modified version of its Corresponding Source. The information must\nsuffice to ensure that the continued functioning of the modified object\ncode is in no case prevented or interfered with solely because\nmodification has been made.\n\n If you convey an object code work under this section in, or with, or\nspecifically for use in, a User Product, and the conveying occurs as\npart of a transaction in which the right of possession and use of the\nUser Product is transferred to the recipient in perpetuity or for a\nfixed term (regardless of how the transaction is characterized), the\nCorresponding Source conveyed under this section must be accompanied\nby the Installation Information. But this requirement does not apply\nif neither you nor any third party retains the ability to install\nmodified object code on the User Product (for example, the work has\nbeen installed in ROM).\n\n The requirement to provide Installation Information does not include a\nrequirement to continue to provide support service, warranty, or updates\nfor a work that has been modified or installed by the recipient, or for\nthe User Product in which it has been modified or installed. 
Access to a\nnetwork may be denied when the modification itself materially and\nadversely affects the operation of the network or violates the rules and\nprotocols for communication across the network.\n\n Corresponding Source conveyed, and Installation Information provided,\nin accord with this section must be in a format that is publicly\ndocumented (and with an implementation available to the public in\nsource code form), and must require no special password or key for\nunpacking, reading or copying.\n\n 7. Additional Terms.\n\n \"Additional permissions\" are terms that supplement the terms of this\nLicense by making exceptions from one or more of its conditions.\nAdditional permissions that are applicable to the entire Program shall\nbe treated as though they were included in this License, to the extent\nthat they are valid under applicable law. If additional permissions\napply only to part of the Program, that part may be used separately\nunder those permissions, but the entire Program remains governed by\nthis License without regard to the additional permissions.\n\n When you convey a copy of a covered work, you may at your option\nremove any additional permissions from that copy, or from any part of\nit. (Additional permissions may be written to require their own\nremoval in certain cases when you modify the work.) 
You may place\nadditional permissions on material, added by you to a covered work,\nfor which you have or can give appropriate copyright permission.\n\n Notwithstanding any other provision of this License, for material you\nadd to a covered work, you may (if authorized by the copyright holders of\nthat material) supplement the terms of this License with terms:\n\n a) Disclaiming warranty or limiting liability differently from the\n terms of sections 15 and 16 of this License; or\n\n b) Requiring preservation of specified reasonable legal notices or\n author attributions in that material or in the Appropriate Legal\n Notices displayed by works containing it; or\n\n c) Prohibiting misrepresentation of the origin of that material, or\n requiring that modified versions of such material be marked in\n reasonable ways as different from the original version; or\n\n d) Limiting the use for publicity purposes of names of licensors or\n authors of the material; or\n\n e) Declining to grant rights under trademark law for use of some\n trade names, trademarks, or service marks; or\n\n f) Requiring indemnification of licensors and authors of that\n material by anyone who conveys the material (or modified versions of\n it) with contractual assumptions of liability to the recipient, for\n any liability that these contractual assumptions directly impose on\n those licensors and authors.\n\n All other non-permissive additional terms are considered \"further\nrestrictions\" within the meaning of section 10. If the Program as you\nreceived it, or any part of it, contains a notice stating that it is\ngoverned by this License along with a term that is a further\nrestriction, you may remove that term. 
If a license document contains\na further restriction but permits relicensing or conveying under this\nLicense, you may add to a covered work material governed by the terms\nof that license document, provided that the further restriction does\nnot survive such relicensing or conveying.\n\n If you add terms to a covered work in accord with this section, you\nmust place, in the relevant source files, a statement of the\nadditional terms that apply to those files, or a notice indicating\nwhere to find the applicable terms.\n\n Additional terms, permissive or non-permissive, may be stated in the\nform of a separately written license, or stated as exceptions;\nthe above requirements apply either way.\n\n 8. Termination.\n\n You may not propagate or modify a covered work except as expressly\nprovided under this License. Any attempt otherwise to propagate or\nmodify it is void, and will automatically terminate your rights under\nthis License (including any patent licenses granted under the third\nparagraph of section 11).\n\n However, if you cease all violation of this License, then your\nlicense from a particular copyright holder is reinstated (a)\nprovisionally, unless and until the copyright holder explicitly and\nfinally terminates your license, and (b) permanently, if the copyright\nholder fails to notify you of the violation by some reasonable means\nprior to 60 days after the cessation.\n\n Moreover, your license from a particular copyright holder is\nreinstated permanently if the copyright holder notifies you of the\nviolation by some reasonable means, this is the first time you have\nreceived notice of violation of this License (for any work) from that\ncopyright holder, and you cure the violation prior to 30 days after\nyour receipt of the notice.\n\n Termination of your rights under this section does not terminate the\nlicenses of parties who have received copies or rights from you under\nthis License. 
If your rights have been terminated and not permanently\nreinstated, you do not qualify to receive new licenses for the same\nmaterial under section 10.\n\n 9. Acceptance Not Required for Having Copies.\n\n You are not required to accept this License in order to receive or\nrun a copy of the Program. Ancillary propagation of a covered work\noccurring solely as a consequence of using peer-to-peer transmission\nto receive a copy likewise does not require acceptance. However,\nnothing other than this License grants you permission to propagate or\nmodify any covered work. These actions infringe copyright if you do\nnot accept this License. Therefore, by modifying or propagating a\ncovered work, you indicate your acceptance of this License to do so.\n\n 10. Automatic Licensing of Downstream Recipients.\n\n Each time you convey a covered work, the recipient automatically\nreceives a license from the original licensors, to run, modify and\npropagate that work, subject to this License. You are not responsible\nfor enforcing compliance by third parties with this License.\n\n An \"entity transaction\" is a transaction transferring control of an\norganization, or substantially all assets of one, or subdividing an\norganization, or merging organizations. If propagation of a covered\nwork results from an entity transaction, each party to that\ntransaction who receives a copy of the work also receives whatever\nlicenses to the work the party's predecessor in interest had or could\ngive under the previous paragraph, plus a right to possession of the\nCorresponding Source of the work from the predecessor in interest, if\nthe predecessor has it or can get it with reasonable efforts.\n\n You may not impose any further restrictions on the exercise of the\nrights granted or affirmed under this License. 
For example, you may\nnot impose a license fee, royalty, or other charge for exercise of\nrights granted under this License, and you may not initiate litigation\n(including a cross-claim or counterclaim in a lawsuit) alleging that\nany patent claim is infringed by making, using, selling, offering for\nsale, or importing the Program or any portion of it.\n\n 11. Patents.\n\n A \"contributor\" is a copyright holder who authorizes use under this\nLicense of the Program or a work on which the Program is based. The\nwork thus licensed is called the contributor's \"contributor version\".\n\n A contributor's \"essential patent claims\" are all patent claims\nowned or controlled by the contributor, whether already acquired or\nhereafter acquired, that would be infringed by some manner, permitted\nby this License, of making, using, or selling its contributor version,\nbut do not include claims that would be infringed only as a\nconsequence of further modification of the contributor version. For\npurposes of this definition, \"control\" includes the right to grant\npatent sublicenses in a manner consistent with the requirements of\nthis License.\n\n Each contributor grants you a non-exclusive, worldwide, royalty-free\npatent license under the contributor's essential patent claims, to\nmake, use, sell, offer for sale, import and otherwise run, modify and\npropagate the contents of its contributor version.\n\n In the following three paragraphs, a \"patent license\" is any express\nagreement or commitment, however denominated, not to enforce a patent\n(such as an express permission to practice a patent or covenant not to\nsue for patent infringement). 
To \"grant\" such a patent license to a\nparty means to make such an agreement or commitment not to enforce a\npatent against the party.\n\n If you convey a covered work, knowingly relying on a patent license,\nand the Corresponding Source of the work is not available for anyone\nto copy, free of charge and under the terms of this License, through a\npublicly available network server or other readily accessible means,\nthen you must either (1) cause the Corresponding Source to be so\navailable, or (2) arrange to deprive yourself of the benefit of the\npatent license for this particular work, or (3) arrange, in a manner\nconsistent with the requirements of this License, to extend the patent\nlicense to downstream recipients. \"Knowingly relying\" means you have\nactual knowledge that, but for the patent license, your conveying the\ncovered work in a country, or your recipient's use of the covered work\nin a country, would infringe one or more identifiable patents in that\ncountry that you have reason to believe are valid.\n\n If, pursuant to or in connection with a single transaction or\narrangement, you convey, or propagate by procuring conveyance of, a\ncovered work, and grant a patent license to some of the parties\nreceiving the covered work authorizing them to use, propagate, modify\nor convey a specific copy of the covered work, then the patent license\nyou grant is automatically extended to all recipients of the covered\nwork and works based on it.\n\n A patent license is \"discriminatory\" if it does not include within\nthe scope of its coverage, prohibits the exercise of, or is\nconditioned on the non-exercise of one or more of the rights that are\nspecifically granted under this License. 
You may not convey a covered\nwork if you are a party to an arrangement with a third party that is\nin the business of distributing software, under which you make payment\nto the third party based on the extent of your activity of conveying\nthe work, and under which the third party grants, to any of the\nparties who would receive the covered work from you, a discriminatory\npatent license (a) in connection with copies of the covered work\nconveyed by you (or copies made from those copies), or (b) primarily\nfor and in connection with specific products or compilations that\ncontain the covered work, unless you entered into that arrangement,\nor that patent license was granted, prior to 28 March 2007.\n\n Nothing in this License shall be construed as excluding or limiting\nany implied license or other defenses to infringement that may\notherwise be available to you under applicable patent law.\n\n 12. No Surrender of Others' Freedom.\n\n If conditions are imposed on you (whether by court order, agreement or\notherwise) that contradict the conditions of this License, they do not\nexcuse you from the conditions of this License. If you cannot convey a\ncovered work so as to satisfy simultaneously your obligations under this\nLicense and any other pertinent obligations, then as a consequence you may\nnot convey it at all. For example, if you agree to terms that obligate you\nto collect a royalty for further conveying from those to whom you convey\nthe Program, the only way you could satisfy both those terms and this\nLicense would be to refrain entirely from conveying the Program.\n\n 13. 
Remote Network Interaction; Use with the GNU General Public License.\n\n Notwithstanding any other provision of this License, if you modify the\nProgram, your modified version must prominently offer all users\ninteracting with it remotely through a computer network (if your version\nsupports such interaction) an opportunity to receive the Corresponding\nSource of your version by providing access to the Corresponding Source\nfrom a network server at no charge, through some standard or customary\nmeans of facilitating copying of software. This Corresponding Source\nshall include the Corresponding Source for any work covered by version 3\nof the GNU General Public License that is incorporated pursuant to the\nfollowing paragraph.\n\n Notwithstanding any other provision of this License, you have\npermission to link or combine any covered work with a work licensed\nunder version 3 of the GNU General Public License into a single\ncombined work, and to convey the resulting work. The terms of this\nLicense will continue to apply to the part which is the covered work,\nbut the work with which it is combined will remain governed by version\n3 of the GNU General Public License.\n\n 14. Revised Versions of this License.\n\n The Free Software Foundation may publish revised and/or new versions of\nthe GNU Affero General Public License from time to time. Such new versions\nwill be similar in spirit to the present version, but may differ in detail to\naddress new problems or concerns.\n\n Each version is given a distinguishing version number. If the\nProgram specifies that a certain numbered version of the GNU Affero General\nPublic License \"or any later version\" applies to it, you have the\noption of following the terms and conditions either of that numbered\nversion or of any later version published by the Free Software\nFoundation. 
If the Program does not specify a version number of the\nGNU Affero General Public License, you may choose any version ever published\nby the Free Software Foundation.\n\n If the Program specifies that a proxy can decide which future\nversions of the GNU Affero General Public License can be used, that proxy's\npublic statement of acceptance of a version permanently authorizes you\nto choose that version for the Program.\n\n Later license versions may give you additional or different\npermissions. However, no additional obligations are imposed on any\nauthor or copyright holder as a result of your choosing to follow a\nlater version.\n\n 15. Disclaimer of Warranty.\n\n THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY\nAPPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT\nHOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM \"AS IS\" WITHOUT WARRANTY\nOF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,\nTHE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR\nPURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM\nIS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF\nALL NECESSARY SERVICING, REPAIR OR CORRECTION.\n\n 16. Limitation of Liability.\n\n IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING\nWILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS\nTHE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY\nGENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE\nUSE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF\nDATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD\nPARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),\nEVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF\nSUCH DAMAGES.\n\n 17. 
Interpretation of Sections 15 and 16.\n\n If the disclaimer of warranty and limitation of liability provided\nabove cannot be given local legal effect according to their terms,\nreviewing courts shall apply local law that most closely approximates\nan absolute waiver of all civil liability in connection with the\nProgram, unless a warranty or assumption of liability accompanies a\ncopy of the Program in return for a fee.\n\n END OF TERMS AND CONDITIONS\n\n How to Apply These Terms to Your New Programs\n\n If you develop a new program, and you want it to be of the greatest\npossible use to the public, the best way to achieve this is to make it\nfree software which everyone can redistribute and change under these terms.\n\n To do so, attach the following notices to the program. It is safest\nto attach them to the start of each source file to most effectively\nstate the exclusion of warranty; and each file should have at least\nthe \"copyright\" line and a pointer to where the full notice is found.\n\n <one line to give the program's name and a brief idea of what it does.>\n Copyright (C) <year> <name of author>\n\n This program is free software: you can redistribute it and/or modify\n it under the terms of the GNU Affero General Public License as published\n by the Free Software Foundation, either version 3 of the License, or\n (at your option) any later version.\n\n This program is distributed in the hope that it will be useful,\n but WITHOUT ANY WARRANTY; without even the implied warranty of\n MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n GNU Affero General Public License for more details.\n\n You should have received a copy of the GNU Affero General Public License\n along with this program. 
If not, see <https://www.gnu.org/licenses/>.\n\nAlso add information on how to contact you by electronic and paper mail.\n\n If your software can interact with users remotely through a computer\nnetwork, you should also make sure that it provides a way for users to\nget its source. For example, if your program is a web application, its\ninterface could display a \"Source\" link that leads users to an archive\nof the code. There are many ways you could offer source, and different\nsolutions will be better for different programs; see section 13 for the\nspecific requirements.\n\n You should also get your employer (if you work as a programmer) or school,\nif any, to sign a \"copyright disclaimer\" for the program, if necessary.\nFor more information on this, and how to apply and follow the GNU AGPL, see\n<https://www.gnu.org/licenses/>.\n`;\n","import { tsx } from \"@tmpl/core\";\n\nexport const mainTsxTemplate = tsx`\n/**\n * WARNING: DO NOT EDIT\n * This file is auto-generated and updated by codegen\n */\nimport { startConnect } from \"@powerhousedao/connect\";\nimport * as localPackage from \"./index.js\";\n\nconst { updateLocalPackage } = startConnect(localPackage);\n\nif (import.meta.hot) {\n import.meta.hot.accept([\"./index.js\"], ([newModule]) => {\n if (newModule) {\n updateLocalPackage(newModule);\n }\n });\n}\n`.raw;\n","import { json } from \"@tmpl/core\";\n\nexport const mcpTemplate = json`\n{\n \"mcpServers\": {\n \"reactor-mcp\": {\n \"type\": \"http\",\n \"url\": \"http://localhost:4001/mcp\"\n }\n }\n}\n`.raw;\n","export const npmrcTemplate = `@jsr:registry=https://npm.jsr.io`;\n","import type { Manifest } from \"@powerhousedao/shared\";\n\nexport const packageJsonExports = {\n \".\": {\n types: \"./dist/browser/index.d.ts\",\n browser: \"./dist/browser/index.js\",\n node: \"./dist/node/index.mjs\",\n },\n \"./document-models\": {\n types: \"./dist/browser/document-models/index.d.ts\",\n browser: \"./dist/browser/document-models/index.js\",\n node: 
\"./dist/node/document-models/index.mjs\",\n },\n \"./document-models/*\": {\n types: \"./dist/browser/document-models/*/index.d.ts\",\n browser: \"./dist/browser/document-models/*/index.js\",\n node: \"./dist/node/document-models/*/index.mjs\",\n },\n \"./editors\": {\n types: \"./dist/browser/editors/index.d.ts\",\n browser: \"./dist/browser/editors/index.js\",\n node: \"./dist/node/editors/index.mjs\",\n },\n \"./editors/*\": {\n types: \"./dist/browser/editors/*/editor.d.ts\",\n browser: \"./dist/browser/editors/*/editor.js\",\n node: \"./dist/node/editors/*/editor.mjs\",\n },\n \"./subgraphs\": {\n types: \"./dist/browser/subgraphs/index.d.ts\",\n browser: \"./dist/browser/subgraphs/index.js\",\n node: \"./dist/node/subgraphs/index.mjs\",\n },\n \"./processors\": {\n types: \"./dist/browser/processors/index.d.ts\",\n browser: \"./dist/browser/processors/index.js\",\n node: \"./dist/node/processors/index.mjs\",\n },\n \"./manifest\": \"./dist/powerhouse.manifest.json\",\n \"./style.css\": \"./dist/style.css\",\n} as const;\n\nexport const packageScripts = {\n \"test:watch\": \"vitest\",\n lint: \"eslint --config eslint.config.js --cache\",\n \"lint:fix\": \"npm run lint -- --fix\",\n tsc: \"tsc\",\n \"tsc:watch\": \"tsc --watch\",\n generate: \"ph-cli generate\",\n connect: \"ph-cli connect\",\n build: \"ph-cli build\",\n reactor: \"ph-cli reactor\",\n service: \"ph-cli service\",\n vetra: \"ph-cli vetra\",\n \"service-startup\":\n \"bash ./node_modules/@powerhousedao/ph-cli/dist/scripts/service-startup.sh\",\n \"service-unstartup\":\n \"bash ./node_modules/@powerhousedao/ph-cli/dist/scripts/service-unstartup.sh\",\n} as const;\n\nexport const externalDependencies = {\n \"@powerhousedao/document-engineering\": \"1.40.1\",\n graphql: \"16.12.0\",\n \"graphql-tag\": \"^2.12.6\",\n zod: \"^4.3.5\",\n react: \"^19.2.3\",\n \"react-dom\": \"^19.2.3\",\n} as const;\n\nexport const externalDevDependencies = {\n \"@electric-sql/pglite\": \"0.3.15\",\n 
\"@electric-sql/pglite-tools\": \"0.2.20\",\n \"@eslint/js\": \"^9.38.0\",\n \"@tailwindcss/cli\": \"^4.1.18\",\n \"@types/node\": \"^24.9.2\",\n \"@types/react\": \"^19.2.3\",\n \"@vitejs/plugin-react\": \"6.0.1\",\n eslint: \"^9.38.0\",\n \"eslint-config-prettier\": \"^10.1.8\",\n \"eslint-plugin-prettier\": \"^5.5.4\",\n \"eslint-plugin-react\": \"^7.37.5\",\n \"eslint-plugin-react-hooks\": \"^7.0.1\",\n globals: \"^16.4.0\",\n tailwindcss: \"^4.1.16\",\n typescript: \"^5.9.3\",\n \"typescript-eslint\": \"^8.46.2\",\n vite: \"8.0.8\",\n \"vite-tsconfig-paths\": \"6.1.1\",\n vitest: \"4.1.1\",\n} as const;\n\nexport const defaultManifest: Manifest = {\n name: \"\",\n description: \"\",\n category: \"\",\n publisher: {\n name: \"\",\n url: \"\",\n },\n documentModels: [],\n editors: [],\n apps: [],\n subgraphs: [],\n processors: [],\n};\n","import { json } from \"@tmpl/core\";\nimport {\n externalDependencies,\n externalDevDependencies,\n packageJsonExports,\n} from \"../../file-builders/constants.js\";\n\n/**\n * Renders a JS object as the inner body of a JSON object\n */\nfunction innerJsonBody(value: object): string {\n return JSON.stringify(value, null, 2).slice(2, -2).trimEnd();\n}\n\nexport const exportsTemplate = innerJsonBody(packageJsonExports);\n\nconst externalDepsTemplate = innerJsonBody(externalDependencies);\nconst externalDevDepsTemplate = innerJsonBody(externalDevDependencies);\n\nconst scriptsTemplate = json`\n \"test\": \"vitest run\",\n \"test:watch\": \"vitest\",\n \"lint\": \"eslint --config eslint.config.js --cache --cache-strategy content\",\n \"lint:fix\": \"npm run lint -- --fix\",\n \"tsc\": \"tsc\",\n \"tsc:watch\": \"tsc --watch\",\n \"check-circular-imports\": \"npx dpdm -T ./index.ts\",\n \"generate\": \"ph-cli generate\",\n \"connect\": \"ph-cli connect\",\n \"build\": \"ph-cli build\",\n \"reactor\": \"ph-cli reactor\",\n \"service\": \"ph-cli service\",\n \"vetra\": \"ph-cli vetra\",\n \"service-startup\": \"bash 
./node_modules/@powerhousedao/ph-cli/dist/scripts/service-startup.sh\",\n \"service-unstartup\": \"bash ./node_modules/@powerhousedao/ph-cli/dist/scripts/service-unstartup.sh\"\n`.raw;\n\nconst dependenciesTemplate = (versionedDependencies: string[]) =>\n json`\n ${versionedDependencies.join(\",\\n \")},\n${externalDepsTemplate}\n`.raw;\n\nconst devDependenciesTemplate = (versionedDevDependencies: string[]) =>\n json`\n ${versionedDevDependencies.join(\",\\n \")},\n${externalDevDepsTemplate}\n`.raw;\n\nexport const packageJsonTemplate = (\n projectName: string,\n versionedDependencies: string[],\n versionedDevDependencies: string[],\n) =>\n json`\n{\n \"name\": \"${projectName}\",\n \"version\": \"1.0.0\",\n \"license\": \"AGPL-3.0-only\",\n \"type\": \"module\",\n \"files\": [\n \"/dist\"\n ],\n \"sideEffects\": false,\n \"exports\": {\n ${exportsTemplate}\n },\n \"scripts\": {\n ${scriptsTemplate}\n },\n \"dependencies\": {\n ${dependenciesTemplate(versionedDependencies)}\n },\n \"devDependencies\": {\n ${devDependenciesTemplate(versionedDevDependencies)}\n }\n}\n`.raw;\n","export const packageJsonScriptsTemplate = {\n build: \"npm run tsc && npm run tailwind\",\n test: \"vitest run\",\n \"test:watch\": \"vitest\",\n lint: \"eslint --config eslint.config.js --cache --cache-strategy content\",\n \"lint:fix\": \"npm run lint -- --fix\",\n tsc: \"tsc\",\n \"tsc:watch\": \"tsc --watch\",\n tailwind: \"pnpm exec tailwindcss -i ./style.css -o ./dist/style.css\",\n prepublishOnly: \"npm run build\",\n \"check-circular-imports\": \"npx dpdm -T ./index.ts\",\n generate: \"ph-cli generate\",\n connect: \"ph-cli connect\",\n reactor: \"ph-cli reactor\",\n service: \"ph-cli service\",\n vetra: \"ph-cli vetra\",\n migrate: \"ph-cli migrate\",\n \"service-startup\":\n \"bash ./node_modules/@powerhousedao/ph-cli/dist/scripts/service-startup.sh\",\n \"service-unstartup\":\n \"bash ./node_modules/@powerhousedao/ph-cli/dist/scripts/service-unstartup.sh\",\n};\n\nexport const 
packageJsonExportsTemplate = {\n \".\": {\n types: \"./dist/index.d.ts\",\n default: \"./dist/index.js\",\n },\n \"./document-models\": {\n types: \"./dist/document-models/index.d.ts\",\n default: \"./dist/document-models/index.js\",\n },\n \"./editors\": {\n types: \"./dist/editors/index.d.ts\",\n default: \"./dist/editors/index.js\",\n },\n \"./document-models/*\": {\n types: \"./dist/document-models/*/index.d.ts\",\n default: \"./dist/document-models/*/index.js\",\n },\n \"./editors/*\": {\n types: \"./dist/editors/*/index.d.ts\",\n default: \"./dist/editors/*/index.js\",\n },\n \"./subgraphs\": {\n types: \"./dist/subgraphs/index.d.ts\",\n default: \"./dist/subgraphs/index.js\",\n },\n \"./processors\": {\n types: \"./dist/processors/index.d.ts\",\n default: \"./dist/processors/index.js\",\n },\n \"./manifest\": {\n default: \"./dist/powerhouse.manifest.json\",\n },\n \"./style.css\": \"./dist/style.css\",\n};\n","import { DEFAULT_REGISTRY_URL } from \"@powerhousedao/shared/registry\";\nimport { json } from \"@tmpl/core\";\nfunction makeVetraConfigField(vetraDriveUrl: string | undefined) {\n if (!vetraDriveUrl) return \"\";\n const driveId = vetraDriveUrl.split(\"/\").pop() ?? 
\"\";\n return json`\n ,\n \"vetra\": {\n \"driveId\": \"${driveId}\",\n \"driveUrl\": \"${vetraDriveUrl}\"\n }\n`.raw;\n}\n\nexport async function buildPowerhouseConfigTemplate(args: {\n tag?: string;\n version?: string;\n remoteDrive?: string;\n}) {\n const vetraConfigField = makeVetraConfigField(args.remoteDrive);\n return json`\n {\n \"documentModelsDir\": \"./document-models\",\n \"editorsDir\": \"./editors\",\n \"processorsDir\": \"./processors\",\n \"subgraphsDir\": \"./subgraphs\",\n \"studio\": {\n \"port\": 3000\n },\n \"reactor\": {\n \"port\": 4001\n },\n \"packages\": [\n ],\n \"packageRegistryUrl\": \"${DEFAULT_REGISTRY_URL}\"${vetraConfigField}\n}\n`.raw;\n}\n","import { json } from \"@tmpl/core\";\n\nexport const ManifestTemplate = (projectName: string) =>\n json`\n{\n \"name\": \"${projectName}\",\n \"description\": \"\",\n \"category\": \"\",\n \"publisher\": {\n \"name\": \"\",\n \"url\": \"\"\n },\n \"documentModels\": [],\n \"editors\": [],\n \"apps\": [],\n \"subgraphs\": [],\n \"importScripts\": [],\n \"config\": []\n}\n\n`.raw;\n","import { md } from \"@tmpl/core\";\n\nexport const readmeTemplate = md`\n# Document Model Boilerplate\n\nThis Document Model Boilerplate provides code generation for scaffolding editors and models.\nIt ensures compatibility with host applications like Connect and the Reactors for seamless document model and editor integration.\n\n## Standard Document Model Workflow with help of the boilerplate.\n\nThis tutorial will guide you through the process of creating a new document model using the Document Model Editor in the Connect app.\n\n<details>\n<summary>Available NPM commands</summary>\n\n- \\`generate\\`: Updates the generated code according to the JSON spec and GraphQL schema of your document model, made in Connect.\n- \\`lint\\`: Checks for errors with ESLint and TypeScript checking.\n- \\`format\\`: Formats the code using Prettier.\n- \\`build\\`: Builds the library project using Vite.\n- \\`storybook\\`: Starts 
Storybook in development mode.\n- \\`build-storybook\\`: Builds Storybook.\n- \\`test\\`: Runs Jest for testing.\n\n</details>\n\n### 1. Defining Your Document Model GraphQL Schema\n\nStart by creating your own 'Powerhouse Project' (Document model + editor).\n\nStep 1: Run the following command to set up your project inside this directory:\n\n~~~bash\nnpm create document-model-lib\n~~~\n\nStep 2: Use the Document Model Editor in the Connect app\n\nThe following command gives you access to all the powerhouse CLI tools available, install it globally if you are a poweruser.\n\n~~~bash\nnpm install ph-cmd\n~~~\n\nNow you are able to launch Connect in Studio Mode (Locally):\n\n~~~bash\nnpm run connect\n~~~\n\nOpen the 'Document Model' creator at the bottom of connect to define your document mode with it's GraphQL Schema Definition.\nThis schema will define the structure and fields for your document model using GraphQL.\nFollow one of our tutorials on Academy to get familiar with the process.\n\n### 2. Defining Document Model Operations\n\nUsing the Document Model Operations Editor, define the operations for your document model and their GraphQL counterparts.\nThese operations will handle state changes within your document model.\n\n**Best Practices:**\n\n- Clearly define CRUD operations (Create, Read, Update, Delete).\n- Use GraphQL input types to specify the parameters for each operation.\n- Ensure that operations align with user intent to maintain a clean and understandable API.\n\n### 3. 
Generating Scaffolding Code\n\nExport your document model as a .zip file from Connect.\nImport the .zip file into your project directory created in Step 1.\nRun the following command to generate the scaffolding code:\n\n~~~bash\nnpm run generate YourModelName.phdm.zip\n~~~\n\nThis will create a new directory under /document-models containing:\n\nJSON file with the document model specification.\nGraphQL file with state and operation schemas.\nA gen/ folder with autogenerated code.\nA src/ folder for your custom code implementation.\n\n### 4. Implementing Reducer Code and Unit Tests\n\nNavigate to the reducer directory:\n\n~~~bash\ncd document-models/\"YourModelName\"/src/reducers\n~~~\n\nImplement the reducer functions for each document model operation. These functions will handle state transitions.\n\nAdd utility functions in:\n\n~~~bash\ndocument-models/\"YourModelName\"/src/utils.ts\n~~~\n\nWrite unit tests to ensure the correctness of your reducers:\n\nTest files should be located in:\n\n~~~bash\ndocument-models/\"YourModelName\"/src/reducers/tests\n~~~\n\nRun the tests:\n\n~~~bash\nnpm test\n~~~\n\nTest the editor functionality:\n\n~~~bash\nnpm run connect\n~~~\n\n### 5. Implementing Document Editors\n\nGenerate the editor template for your document model:\n\n~~~bash\nnpm run generate -- --editor YourModelName --document-types powerhouse/YourModelName\n~~~\n\nThe --editor flag specifies the name of your document model.\nThe --document-types flag links the editor to your document model type.\nAfter generation:\n\nOpen the editor template:\n\n~~~bash\neditors/YourModelName/editor.tsx\n~~~\n\nCustomize the editor interface to suit your document model.\n\n### 6. Testing the Document Editor\n\nRun the Connect app to test your document editor:\n\n~~~bash\nnpm run connect\n~~~\n\nVerify that the editor functions as expected.\nPerform end-to-end testing to ensure smooth integration between the document model and its editor.\n\n### 7. 
Adding a Manifest File\n\nCreate a manifest file to describe your document model and editor. This enables proper integration with the host application.\n\n**Example manifest.json:**\n\n~~~json\n{\n \"name\": \"your-model-name\",\n \"description\": \"A brief description of your document model.\",\n \"category\": \"your-category\", // e.g., \"Finance\", \"People Ops\", \"Legal\"\n \"publisher\": {\n \"name\": \"your-publisher-name\",\n \"url\": \"your-publisher-url\"\n },\n \"documentModels\": [\n {\n \"id\": \"your-model-id\",\n \"name\": \"your-model-name\"\n }\n ],\n \"editors\": [\n {\n \"id\": \"your-editor-id\",\n \"name\": \"your-editor-name\",\n \"documentTypes\": [\"your-model-id\"]\n }\n ]\n}\n~~~\n\n### Steps to finalize:\n\nPlace the manifest file at your project root.\nUpdate your index.js to export your modules and include the new document model and editor.\n\n### Final Thoughts\n\nYou've now successfully created a Document Model and its corresponding Editor using the Connect app!\n\nNext Steps:\n\n- Expand functionality: Add more operations or complex logic to your document model.\n- Improve UX: Enhance the document editor for a smoother user experience.\n- Integrate with other systems: Use APIs or GraphQL to connect your document model with external services.\n`.raw;\n","import { css } from \"@tmpl/core\";\n\nexport const styleTemplate = css`\n @import \"tailwindcss\";\n @import \"@powerhousedao/design-system/theme.css\";\n @import \"@powerhousedao/connect/style.css\";\n\n @theme {\n /* You can customize the theme by overriding the theme variables here */\n /* See https://tailwindcss.com/docs/theme#using-a-custom-theme for details */\n\n /* If you would prefer to use plain css, add your styles below this @theme tag as you normally would */\n\n /* A sensible CSS reset is applied by default. 
If you would prefer to use a different reset or none at all, uncomment the following line */\n\n /* --*: initial; */\n }\n`.raw;\n","export const subgraphsIndexTemplate = `\n/**\n * WARNING: DO NOT EDIT\n * This file is auto-generated and updated by codegen\n */\n`;\n","import { json } from \"@tmpl/core\";\n\nexport const tsconfigPathsTemplate = json`\n\"document-models\": [\n \"./document-models/index.ts\"\n],\n\"document-models/*\": [\n \"./document-models/*/index.ts\"\n],\n\"editors\": [\n \"./editors/index.ts\"\n],\n\"editors/*\": [\n \"./editors/*/index.ts\"\n],\n\"processors/*\": [\n \"./processors/*/index.ts\"\n],\n\"subgraphs\": [\n \"./subgraphs/index.ts\"\n],\n\"subgraphs/*\": [\n \"./subgraphs/*/index.ts\"\n]`.raw;\n\nexport const tsConfigTemplate = json`\n{\n \"compilerOptions\": {\n \"outDir\": \"./dist\",\n \"rootDir\": \".\",\n // paths for easy access to project modules\n \"paths\": {\n ${tsconfigPathsTemplate}\n },\n \"module\": \"nodenext\",\n \"moduleDetection\": \"force\",\n \"target\": \"esnext\",\n \"jsx\": \"react-jsx\",\n \"types\": [\"node\", \"vite/client\", \"vitest/globals\"],\n \"lib\": [\"ESNext\", \"dom\", \"dom.iterable\"],\n \"declaration\": true,\n \"declarationMap\": true,\n \"emitDeclarationOnly\": true,\n \"strict\": true,\n \"verbatimModuleSyntax\": true,\n \"isolatedModules\": true,\n \"noUncheckedSideEffectImports\": true,\n \"skipLibCheck\": true\n },\n \"include\": [\"**/*\", \"./powerhouse.manifest.json\"],\n \"exclude\": [\"dist\", \"node_modules\", \".ph\", \"vitest.config.ts\"]\n}\n`.raw;\n","import { ts } from \"@tmpl/core\";\n\nexport const vitestConfigTemplate = ts`\nimport { defineConfig } from \"vitest/config\";\nimport react from \"@vitejs/plugin-react\";\nimport tsconfigPaths from \"vite-tsconfig-paths\";\n\nexport default defineConfig({\n test: {\n globals: true,\n },\n plugins: [tsconfigPaths(), react()],\n});\n\n`.raw;\n","import type { CommandHelpInfo, HelpTopic } from \"@powerhousedao/codegen\";\nimport { 
capitalCase, kebabCase } from \"change-case\";\nfunction groupHelpTopicsByCategory(helpTopics: HelpTopic[]) {\n const helpTopicsByCategory: Record<string, HelpTopic[] | undefined> = {};\n\n for (const helpTopic of helpTopics) {\n if (!helpTopicsByCategory[helpTopic.category]) {\n helpTopicsByCategory[helpTopic.category] = [helpTopic];\n } else {\n helpTopicsByCategory[helpTopic.category]?.push(helpTopic);\n }\n }\n\n return helpTopicsByCategory;\n}\n\nfunction makeTableOfContents(commandsHelpInfo: CommandHelpInfo[]) {\n const commandNames = commandsHelpInfo.map(({ name }) => name);\n const tableOfContentsEntries: string[] = [];\n\n for (const name of commandNames) {\n tableOfContentsEntries.push(\n `- [${capitalCase(name)}](#${kebabCase(name)})\\n`,\n );\n }\n\n return tableOfContentsEntries.join(\"\");\n}\n\nfunction makeDefaultsDescriptors(defaults: string[]) {\n const withoutOptional = defaults.filter(\n (d) => d !== \"optional\" && d !== \"[...optional]\",\n );\n const formatted = withoutOptional.map((d) => {\n const [label, ...rest] = d.split(\":\").map((s) => s.trim());\n return `**${label}**: \\`${rest.join(\"\")}\\``;\n });\n return formatted;\n}\n\nfunction makeRequiredDescriptor(defaults: string[]) {\n if (\n defaults.includes(\"optional\") ||\n defaults.includes(\"[...optional]\") ||\n defaults.some((d) => d.includes(\"default\"))\n )\n return \"\";\n return \"*[required]*\";\n}\n\nfunction makeHeadingFromUsage(usage: string) {\n if (usage.includes(\"--\")) {\n const usageAsWords = capitalCase(usage.split(\" \")[0].replace(\"--\", \"\"));\n return `#### ${usageAsWords}`;\n }\n const usageWithoutBrackets = capitalCase(\n usage.replace(\"[\", \"\").replace(\"]\", \"\"),\n );\n return `#### ${usageWithoutBrackets}`;\n}\nfunction makeCommandHelpTopicDocs(helpTopic: HelpTopic) {\n const { defaults, description, usage } = helpTopic;\n const heading = makeHeadingFromUsage(usage);\n const requiredDescriptor = makeRequiredDescriptor(defaults);\n const 
defaultsDescriptors = makeDefaultsDescriptors(defaults);\n\n return `${heading} ${requiredDescriptor}<br>\n${description}<br><br>\n**usage:** \\`${usage}\\`<br>\n${defaultsDescriptors.join(\"<br>\")}\n`;\n}\n\nfunction makeCommandHelpTopicsDocs(helpTopics: HelpTopic[]) {\n return helpTopics.map(makeCommandHelpTopicDocs).join(\"\");\n}\n\nfunction makeCommandHelpTopicsDocsForCategories(helpTopics: HelpTopic[]) {\n const helpTopicsByCategory = groupHelpTopicsByCategory(helpTopics);\n const helpTopicsDocs: string[] = [];\n\n for (const [category, helpTopics] of Object.entries(helpTopicsByCategory)) {\n const helpTopicDocs = makeCommandHelpTopicsDocs(helpTopics ?? []);\n\n helpTopicsDocs.push(\n `### ${category}\n${helpTopicDocs}\n`,\n );\n }\n\n return helpTopicsDocs.join(\"\");\n}\n\nfunction makeCommandDoc(commandHelpInfo: CommandHelpInfo) {\n const { name, description, helpTopics } = commandHelpInfo;\n return `## ${capitalCase(name)}\n${description}\n${makeCommandHelpTopicsDocsForCategories(helpTopics)}`;\n}\n\nfunction makeCommandDocs(commandsHelpInfo: CommandHelpInfo[]) {\n return commandsHelpInfo.map(makeCommandDoc).join(\"\");\n}\n\nexport const docsFromCliHelpTemplate = (v: {\n cliDescription: string;\n commandsHelpInfo: CommandHelpInfo[];\n docsTitle: string;\n docsIntroduction: string;\n}) =>\n `# ${v.docsTitle}<br>\n${v.docsIntroduction}<br><br>\n${v.cliDescription}<br>\n## Table of Contents\n${makeTableOfContents(v.commandsHelpInfo)}<br>\n${makeCommandDocs(v.commandsHelpInfo)}\n`;\n","import type { EditorVariableNames } from \"@powerhousedao/codegen\";\nimport { tsx } from \"@tmpl/core\";\n\nexport const documentEditorEditorFileTemplate = (\n v: EditorVariableNames & {\n documentModelImportPath: string;\n },\n) =>\n tsx`\nimport { DocumentStateViewer, DocumentToolbar } from \"@powerhousedao/design-system/connect\";\nimport { ${v.useSelectedDocumentHookName}, actions } from \"${v.documentModelImportPath}\";\n\nexport default function Editor() {\n const 
[document, dispatch] = ${v.useSelectedDocumentHookName}();\n\n const handleSetName = (name: string) => {\n // 'actions' contains all available actions for this document type\n dispatch(actions.setName(name));\n };\n\n return (\n <div className=\"mx-auto max-w-4xl bg-gray-50 p-6\">\n <DocumentToolbar />\n\n {/* \"ph-default-styles\" sets default styles for basic UI elements */}\n <div className=\"ph-default-styles\">\n {/* Edit document name */}\n <label className=\"my-6\">\n <h3>Document Name</h3>\n <input\n type=\"text\"\n defaultValue={document.header.name}\n placeholder=\"Enter document name...\"\n title=\"Edit document name and click outside to save.\"\n autoFocus\n onBlur={(e) => handleSetName(e.target.value.trim())}\n onKeyDown={(e) => {\n if (e.key === \"Enter\") {\n e.currentTarget.blur();\n }\n }}\n className=\"font-semibold\"\n />\n </label>\n <hr />\n\n {/* Document header info */}\n <div className=\"mb-6 grid grid-cols-2 gap-x-8\">\n <label>\n <h3 className=\"text-base\">ID</h3>\n <input\n type=\"text\"\n value={document.header.id}\n readOnly\n className=\"font-mono\"\n />\n </label>\n <label>\n <h3 className=\"text-base\">Created</h3>\n <input\n type=\"text\"\n value={new Date(document.header.createdAtUtcIso).toLocaleString()}\n readOnly\n />\n </label>\n <label>\n <h3 className=\"text-base\">Type</h3>\n <input type=\"text\" value={document.header.documentType} readOnly />\n </label>\n <label>\n <h3 className=\"text-base\">Last Modified</h3>\n <input\n type=\"text\"\n value={new Date(\n document.header.lastModifiedAtUtcIso,\n ).toLocaleString()}\n readOnly\n />\n </label>\n </div>\n\n {/* Document state */}\n <div className=\"mt-6\">\n <h3 className=\"text-base\">Document State</h3>\n <DocumentStateViewer state={document.state} />\n </div>\n </div>\n </div>\n );\n}\n`.raw;\n","import { tsx } from \"@tmpl/core\";\n\nexport const documentEditorModuleFileTemplate = (v: {\n editorName: string;\n pascalCaseEditorName: string;\n editorId: string;\n 
documentTypes: string;\n}) =>\n tsx`\n/**\n * WARNING: DO NOT EDIT\n * This file is auto-generated and updated by codegen\n */\nimport type { EditorModule } from \"document-model\";\nimport { lazy } from \"react\";\n\n/** Document editor module for the \"${v.documentTypes}\" document type */\nexport const ${v.pascalCaseEditorName}: EditorModule = {\n Component: lazy(() => import(\"./editor.js\")),\n documentTypes: ${v.documentTypes},\n config: {\n id: \"${v.editorId}\",\n name: \"${v.editorName}\",\n },\n};\n`.raw;\n","import type { ModuleSpecification } from \"@powerhousedao/shared\";\nimport { ts } from \"@tmpl/core\";\nimport { camelCase } from \"change-case\";\nimport type { DocumentModelFileMakerArgs } from \"file-builders\";\n\nfunction buildModuleActionsName(\n module: ModuleSpecification,\n camelCaseDocumentType: string,\n) {\n const camelCaseModuleName = camelCase(module.name);\n return `${camelCaseDocumentType}${camelCaseModuleName.charAt(0).toUpperCase()}${camelCaseModuleName.slice(1)}Actions`;\n}\n\nfunction buildModuleActionsNames(\n modules: ModuleSpecification[],\n camelCaseDocumentType: string,\n) {\n return modules.map((m) => buildModuleActionsName(m, camelCaseDocumentType));\n}\n\nfunction buildModuleActionsImports(\n modules: ModuleSpecification[],\n camelCaseDocumentType: string,\n) {\n const actionNames = buildModuleActionsNames(\n modules,\n camelCaseDocumentType,\n ).join(\",\\n\");\n return `import { ${actionNames} } from \"./gen/creators.js\";`;\n}\n\nfunction buildModuleActionsSpreadExport(\n modules: ModuleSpecification[],\n camelCaseDocumentType: string,\n) {\n const spreadActionNames = buildModuleActionsNames(\n modules,\n camelCaseDocumentType,\n )\n .map((n) => `...${n}`)\n .join(\",\\n\");\n return `\nexport const actions = { ...baseActions, ${spreadActionNames} }`;\n}\nexport const documentModelRootActionsFileTemplate = (\n v: DocumentModelFileMakerArgs,\n) =>\n ts`\n/**\n * WARNING: DO NOT EDIT\n * This file is auto-generated and 
updated by codegen\n */\nimport { baseActions } from \"document-model\";\n${buildModuleActionsImports(v.specification.modules, v.camelCaseDocumentType)}\n\n/** Actions for the ${v.pascalCaseDocumentType} document model */\n${buildModuleActionsSpreadExport(v.specification.modules, v.camelCaseDocumentType)}\n`.raw;\n","import type { ModuleSpecification } from \"@powerhousedao/shared\";\nimport { ts } from \"@tmpl/core\";\nimport { kebabCase, pascalCase } from \"change-case\";\nimport type { DocumentModelFileMakerArgs } from \"file-builders\";\n\nfunction makeModuleActionsTypeImport(\n module: ModuleSpecification,\n pascalCaseDocumentType: string,\n) {\n const pascalCaseModuleName = pascalCase(module.name);\n const kebabCaseModuleName = kebabCase(module.name);\n return `import type { ${pascalCaseDocumentType}${pascalCaseModuleName}Action } from \"./${kebabCaseModuleName}/actions.js\";`;\n}\nfunction makeModuleActionsTypeImports(\n modules: ModuleSpecification[],\n pascalCaseDocumentType: string,\n) {\n return modules\n .map((module) =>\n makeModuleActionsTypeImport(module, pascalCaseDocumentType),\n )\n .join(\"\\n\");\n}\n\nfunction makeModuleActionsTypeExport(module: ModuleSpecification) {\n const kebabCaseModuleName = kebabCase(module.name);\n return `export * from \"./${kebabCaseModuleName}/actions.js\";`;\n}\n\nfunction makeModuleActionsTypeExports(modules: ModuleSpecification[]) {\n return modules.map(makeModuleActionsTypeExport).join(\"\\n\");\n}\n\nfunction makeModuleActionTypeName(\n module: ModuleSpecification,\n pascalCaseDocumentType: string,\n) {\n const pascalCaseModuleName = pascalCase(module.name);\n return `${pascalCaseDocumentType}${pascalCaseModuleName}Action`;\n}\n\nfunction makeModuleActionTypesUnion(\n modules: ModuleSpecification[],\n pascalCaseDocumentType: string,\n) {\n return modules\n .map((module) => makeModuleActionTypeName(module, pascalCaseDocumentType))\n .join(\"|\\n\");\n}\n\nfunction makeDocumentActionType(\n modules: 
ModuleSpecification[],\n pascalCaseDocumentType: string,\n) {\n const actionTypeUnion = makeModuleActionTypesUnion(\n modules,\n pascalCaseDocumentType,\n );\n return `export type ${pascalCaseDocumentType}Action = ${actionTypeUnion};`;\n}\nexport const documentModelGenActionsFileTemplate = (\n v: DocumentModelFileMakerArgs,\n) =>\n ts`\n/**\n * WARNING: DO NOT EDIT\n * This file is auto-generated and updated by codegen\n */\n${makeModuleActionsTypeImports(v.specification.modules, v.pascalCaseDocumentType)}\n\n${makeModuleActionsTypeExports(v.specification.modules)}\n\n${makeDocumentActionType(v.specification.modules, v.pascalCaseDocumentType)}\n`.raw;\n","import { ts } from \"@tmpl/core\";\nimport type { DocumentModelFileMakerArgs } from \"file-builders\";\n\nexport const documentModelGenControllerFileTemplate = (\n v: DocumentModelFileMakerArgs,\n) =>\n ts`\n/**\n * WARNING: DO NOT EDIT\n * This file is auto-generated and updated by codegen\n */\nimport { PHDocumentController } from \"document-model\";\nimport { ${v.pascalCaseDocumentType} } from \"../module.js\";\nimport type { ${v.actionTypeName}, ${v.phStateName} } from \"./types.js\";\n\nexport const ${v.pascalCaseDocumentType}Controller = PHDocumentController.forDocumentModel<\n ${v.phStateName},\n ${v.actionTypeName}\n>(${v.pascalCaseDocumentType});\n`.raw;\n","import type { ModuleSpecification } from \"@powerhousedao/shared\";\nimport { ts } from \"@tmpl/core\";\nimport { camelCase, kebabCase } from \"change-case\";\nimport type { DocumentModelFileMakerArgs } from \"file-builders\";\n\nfunction buildModuleCreatorsExport(\n module: ModuleSpecification,\n camelCaseDocumentType: string,\n) {\n const kebabCaseModuleName = kebabCase(module.name);\n const camelCaseModuleName = camelCase(module.name);\n const namespaceName = `${camelCaseDocumentType}${camelCaseModuleName.charAt(0).toUpperCase()}${camelCaseModuleName.slice(1)}Actions`;\n const moduleCreatorsExport = `export * from 
\"./${kebabCaseModuleName}/creators.js\";`;\n const moduleCreatorsNamespaceExport = `export * as ${namespaceName} from \"./${kebabCaseModuleName}/creators.js\";`;\n\n return [moduleCreatorsExport, moduleCreatorsNamespaceExport];\n}\nfunction buildCreatorsExports(\n modules: ModuleSpecification[],\n camelCaseDocumentType: string,\n) {\n return modules\n .flatMap((module) =>\n buildModuleCreatorsExport(module, camelCaseDocumentType),\n )\n .join(\"\\n\");\n}\nexport const documentModelGenCreatorsFileTemplate = (\n v: DocumentModelFileMakerArgs,\n) =>\n ts`\n/**\n * WARNING: DO NOT EDIT\n * This file is auto-generated and updated by codegen\n */\n${buildCreatorsExports(v.specification.modules, v.camelCaseDocumentType)}\n`.raw;\n","import { ts } from \"@tmpl/core\";\nimport type { DocumentModelFileMakerArgs } from \"file-builders\";\n\nexport const documentModelDocumentSchemaFileTemplate = (\n v: DocumentModelFileMakerArgs,\n) =>\n ts`\n/**\n * WARNING: DO NOT EDIT\n * This file is auto-generated and updated by codegen\n */\nimport {\n BaseDocumentHeaderSchema,\n BaseDocumentStateSchema,\n} from \"document-model\";\nimport { z } from \"zod\";\nimport { ${v.documentTypeVariableName} } from \"./document-type.js\";\nimport { ${v.stateSchemaName} } from \"./schema/zod.js\";\nimport type { ${v.phDocumentTypeName}, ${v.phStateName} } from \"./types.js\";\n\n/** Schema for validating the header object of a ${v.pascalCaseDocumentType} document */\nexport const ${v.phDocumentTypeName}HeaderSchema = BaseDocumentHeaderSchema.extend({\n documentType: z.literal(${v.documentTypeVariableName}),\n});\n\n/** Schema for validating the state object of a ${v.pascalCaseDocumentType} document */\nexport const ${v.phStateName}Schema = BaseDocumentStateSchema.extend({\n global: ${v.stateSchemaName}(),\n});\n\nexport const ${v.phDocumentSchemaName} = z.object({\n header: ${v.phDocumentTypeName}HeaderSchema,\n state: ${v.phStateName}Schema,\n initialState: ${v.phStateName}Schema,\n});\n\n/** 
Simple helper function to check if a state object is a ${v.pascalCaseDocumentType} document state object */\nexport function ${v.isPhStateOfTypeFunctionName}(\n state: unknown,\n): state is ${v.phStateName} {\n return ${v.phStateName}Schema.safeParse(state).success;\n}\n\n/** Simple helper function to assert that a document state object is a ${v.pascalCaseDocumentType} document state object */\nexport function ${v.assertIsPhStateOfTypeFunctionName}(\n state: unknown,\n): asserts state is ${v.phStateName} {\n ${v.phStateName}Schema.parse(state);\n}\n\n/** Simple helper function to check if a document is a ${v.pascalCaseDocumentType} document */\nexport function ${v.isPhDocumentOfTypeFunctionName}(\n document: unknown,\n): document is ${v.phDocumentTypeName} {\n return ${v.phDocumentSchemaName}.safeParse(document).success;\n}\n\n/** Simple helper function to assert that a document is a ${v.pascalCaseDocumentType} document */\nexport function ${v.assertIsPhDocumentOfTypeFunctionName}(\n document: unknown,\n): asserts document is ${v.phDocumentTypeName} {\n ${v.phDocumentSchemaName}.parse(document);\n}\n`.raw;\n","import { ts } from \"@tmpl/core\";\nimport type { DocumentModelFileMakerArgs } from \"file-builders\";\n\nexport const documentModelDocumentTypeTemplate = (\n v: DocumentModelFileMakerArgs,\n) =>\n ts`\n/**\n * WARNING: DO NOT EDIT\n * This file is auto-generated and updated by codegen\n */\nexport const ${v.documentTypeVariableName} = \"${v.documentModelState.id}\";\n`.raw;\n","import type { ModuleSpecification } from \"@powerhousedao/shared\";\nimport { ts } from \"@tmpl/core\";\nimport { kebabCase } from \"change-case\";\nimport type { DocumentModelFileMakerArgs } from \"file-builders\";\n\nfunction buildModuleOperationsExports(module: ModuleSpecification) {\n const moduleDirName = kebabCase(module.name);\n return `export * from \"./${moduleDirName}/operations.js\";`;\n}\n\nfunction buildModulesOperationsExports(modules: ModuleSpecification[]) {\n return 
modules.map(buildModuleOperationsExports).join(\"\\n\");\n}\n\nexport const documentModelGenIndexFileTemplate = (\n v: DocumentModelFileMakerArgs,\n) =>\n ts`\n/**\n * WARNING: DO NOT EDIT\n * This file is auto-generated and updated by codegen\n */\nexport * from './actions.js';\nexport * from './document-model.js';\nexport * from './types.js';\nexport * from './creators.js';\nexport {\n create${v.phDocumentTypeName},\n createState,\n defaultPHState,\n defaultGlobalState,\n defaultLocalState,\n} from './ph-factories.js';\nexport * from \"./utils.js\";\nexport * from \"./reducer.js\";\nexport * from \"./controller.js\";\nexport * from \"./schema/index.js\";\nexport * from \"./document-type.js\";\nexport * from \"./document-schema.js\";\n${buildModulesOperationsExports(v.specification.modules)}\n`.raw;\n","import type { OperationSpecification } from \"@powerhousedao/shared\";\nimport { constantCase, pascalCase } from \"change-case\";\nimport {\n operationHasInput,\n type DocumentModelModuleFileMakerArgs,\n} from \"file-builders\";\nexport function getActionTypeName(operation: OperationSpecification) {\n if (!operation.name) return;\n return `${pascalCase(operation.name)}Action`;\n}\n\nexport function getActionInputName(operation: OperationSpecification) {\n if (!operation.name) return;\n if (!operationHasInput(operation)) return;\n return `${pascalCase(operation.name)}Input`;\n}\n\nexport function getActionType(operation: OperationSpecification) {\n if (!operation.name) return;\n return constantCase(operation.name);\n}\n\nexport function getActionInputTypeNames(\n args: DocumentModelModuleFileMakerArgs,\n) {\n return args.module.operations.map(getActionInputName).join(\",\\n\");\n}\n","import type { DocumentModelDocumentTypeMetadata } from \"@powerhousedao/codegen\";\nimport type {\n DocumentModelGlobalState,\n DocumentSpecification,\n ModuleSpecification,\n OperationErrorSpecification,\n OperationSpecification,\n} from \"@powerhousedao/shared\";\nimport { camelCase, 
kebabCase, pascalCase } from \"change-case\";\n\nexport function getEditorVariableNames({\n documentModelDocumentTypeName,\n}: DocumentModelDocumentTypeMetadata) {\n return {\n documentModelDocumentTypeName,\n documentVariableName: camelCase(documentModelDocumentTypeName),\n useSelectedDocumentHookName: `useSelected${documentModelDocumentTypeName}`,\n documentNameVariableName: `${documentModelDocumentTypeName}Name`,\n dispatchFunctionName: \"dispatch\",\n onClickEditHandlerName: `onClickEdit${documentModelDocumentTypeName}Name`,\n onCancelEditHandlerName: `onClickCancelEdit${documentModelDocumentTypeName}Name`,\n setNameActionName: \"setName\",\n isEditingVariableName: \"isEditing\",\n setIsEditingFunctionName: \"setIsEditing\",\n onSubmitSetNameFunctionName: `onSubmitSet${documentModelDocumentTypeName}Name`,\n };\n}\n\nexport function getDocumentModelDirName(\n documentModelState: DocumentModelGlobalState,\n existingDirName?: string,\n) {\n if (existingDirName) return existingDirName;\n return kebabCase(documentModelState.name);\n}\n\nexport function getLatestDocumentModelSpec({\n specifications,\n}: DocumentModelGlobalState) {\n return specifications[specifications.length - 1];\n}\n\nexport function getDocumentModelSpecByVersionNumber(\n { specifications }: DocumentModelGlobalState,\n version: number,\n): DocumentSpecification {\n const specificationByIndex = specifications[version];\n const specificationByNumber = specifications.find(\n (spec) => spec.version === version,\n );\n if (!specificationByNumber) {\n console.error(\n `Specification with version number ${version} does not exist in the document model specifications array`,\n );\n return specificationByIndex;\n }\n if (specificationByIndex.version !== specificationByNumber.version) {\n console.error(\n `Specification with version ${version} does not match specifications array at index ${version}`,\n );\n return specificationByIndex;\n }\n\n return specificationByNumber;\n}\n\nexport function 
getLatestDocumentModelSpecVersionNumber(\n documentModelState: DocumentModelGlobalState,\n) {\n return getLatestDocumentModelSpec(documentModelState).version;\n}\n\nexport function getDocumentModelVariableNames(documentModelName: string) {\n const kebabCaseDocumentType = kebabCase(documentModelName);\n const pascalCaseDocumentType = pascalCase(documentModelName);\n const camelCaseDocumentType = camelCase(documentModelName);\n const documentTypeVariableName = `${camelCaseDocumentType}DocumentType`;\n const stateName = `${pascalCaseDocumentType}State`;\n const globalStateName = `${pascalCaseDocumentType}GlobalState`;\n const localStateName = `${pascalCaseDocumentType}LocalState`;\n const phStateName = `${pascalCaseDocumentType}PHState`;\n const phDocumentTypeName = `${pascalCaseDocumentType}Document`;\n const actionTypeName = `${pascalCaseDocumentType}Action`;\n const actionsTypeName = `${actionTypeName}s`;\n const actionsName = camelCase(actionsTypeName);\n const upgradeManifestName = `${camelCaseDocumentType}UpgradeManifest`;\n\n const stateSchemaName = `${stateName}Schema`;\n const phDocumentSchemaName = `${phDocumentTypeName}Schema`;\n const isPhStateOfTypeFunctionName = `is${stateName}`;\n const assertIsPhStateOfTypeFunctionName = `assertIs${stateName}`;\n const isPhDocumentOfTypeFunctionName = `is${phDocumentTypeName}`;\n const assertIsPhDocumentOfTypeFunctionName = `assertIs${phDocumentTypeName}`;\n const useByIdHookName = `use${phDocumentTypeName}ById`;\n const useSelectedHookName = `useSelected${phDocumentTypeName}`;\n const useInSelectedDriveHookName = `use${phDocumentTypeName}sInSelectedDrive`;\n const useInSelectedFolderHookName = `use${phDocumentTypeName}sInSelectedFolder`;\n\n return {\n kebabCaseDocumentType,\n pascalCaseDocumentType,\n camelCaseDocumentType,\n documentTypeVariableName,\n upgradeManifestName,\n stateName,\n globalStateName,\n localStateName,\n phStateName,\n phDocumentTypeName,\n actionTypeName,\n actionsTypeName,\n actionsName,\n 
stateSchemaName,\n phDocumentSchemaName,\n isPhStateOfTypeFunctionName,\n assertIsPhStateOfTypeFunctionName,\n isPhDocumentOfTypeFunctionName,\n assertIsPhDocumentOfTypeFunctionName,\n useByIdHookName,\n useSelectedHookName,\n useInSelectedDriveHookName,\n useInSelectedFolderHookName,\n };\n}\n\nfunction makeNormalizedError(error: OperationErrorSpecification) {\n if (!error.name) {\n throw new Error(\"Error name is required\");\n }\n const code = error.code || pascalCase(error.name);\n return {\n ...error,\n code,\n };\n}\n\nfunction getErrorsFromOperation(operation: OperationSpecification) {\n const errors = operation.errors;\n const errorCodeSet = new Set<string>();\n const normalizedErrors: OperationErrorSpecification[] = [];\n\n for (const error of errors) {\n const normalizedError = makeNormalizedError(error);\n if (!errorCodeSet.has(normalizedError.code)) {\n errorCodeSet.add(normalizedError.code);\n normalizedErrors.push(normalizedError);\n } else {\n console.warn(\n `Warning: Duplicate error code \"${error.code}\" with different fields found`,\n );\n }\n }\n\n return normalizedErrors;\n}\n\nfunction getErrorsFromOperations(operations: OperationSpecification[]) {\n return operations.flatMap(getErrorsFromOperation);\n}\n","import type { DocumentModelModuleFileMakerArgs } from \"@powerhousedao/codegen\";\nimport type { OperationSpecification } from \"@powerhousedao/shared\";\nimport { ts } from \"@tmpl/core\";\nimport { pascalCase } from \"change-case\";\nimport { operationHasAttachment } from \"file-builders\";\nimport {\n getActionInputName,\n getActionInputTypeNames,\n getActionType,\n getActionTypeName,\n} from \"name-builders\";\n\nfunction getActionTypeExport(operation: OperationSpecification) {\n const baseActionTypeName = operationHasAttachment(operation)\n ? \"ActionWithAttachment\"\n : \"Action\";\n const actionTypeName = getActionTypeName(operation);\n const actionInputName = getActionInputName(operation) ?? 
`\"{}\"`;\n const actionType = getActionType(operation);\n\n return ts`export type ${actionTypeName} = ${baseActionTypeName} & { type: \"${actionType}\"; input: ${actionInputName} };`\n .raw;\n}\n\nfunction getActionTypeExports(args: DocumentModelModuleFileMakerArgs) {\n return args.module.operations.map(getActionTypeExport).join(\"\\n\");\n}\n\nexport function getModuleExportType(args: DocumentModelModuleFileMakerArgs) {\n const { pascalCaseDocumentType, module } = args;\n const actionTypeNames = module.operations.map(getActionTypeName).join(\" |\\n\");\n return ts`export type ${pascalCaseDocumentType}${pascalCase(module.name)}Action = ${actionTypeNames};`\n .raw;\n}\n\nfunction getDocumentModelActionTypeImportNames(\n args: DocumentModelModuleFileMakerArgs,\n) {\n const actionTypeImports = [\"Action\"];\n const anyActionHasAttachment = args.module.operations.some((a) =>\n operationHasAttachment(a),\n );\n if (anyActionHasAttachment) {\n actionTypeImports.push(\"ActionWithAttachment\");\n }\n return actionTypeImports.join(\",\\n\");\n}\nexport const documentModelOperationModuleActionsFileTemplate = (\n v: DocumentModelModuleFileMakerArgs,\n) =>\n ts`\n/**\n * WARNING: DO NOT EDIT\n * This file is auto-generated and updated by codegen\n */\nimport type { ${getDocumentModelActionTypeImportNames(v)} } from 'document-model';\nimport type {\n ${getActionInputTypeNames(v)}\n} from '../types.js';\n\n${getActionTypeExports(v)}\n\n${getModuleExportType(v)}\n`.raw;\n","import type { DocumentModelModuleFileMakerArgs } from \"@powerhousedao/codegen\";\nimport type { OperationSpecification } from \"@powerhousedao/shared\";\nimport { ts } from \"@tmpl/core\";\nimport { camelCase, constantCase, pascalCase } from \"change-case\";\nimport { operationHasAttachment, operationHasEmptyInput } from \"file-builders\";\nimport { isTruthy } from \"remeda\";\n\nfunction makeDocumentModelTypeImports(args: DocumentModelModuleFileMakerArgs) {\n const actionTypeImports = [\"createAction\"];\n 
const anyActionHasAttachment = args.module.operations.some((a) =>\n operationHasAttachment(a),\n );\n if (anyActionHasAttachment) {\n actionTypeImports.push(\"AttachmentInput\");\n }\n return actionTypeImports.join(\",\\n\");\n}\n\nfunction makeActionInputSchemaName(action: OperationSpecification) {\n if (!action.name || !action.schema) return;\n const pascalCaseActionName = pascalCase(action.name);\n return `${pascalCaseActionName}InputSchema`;\n}\n\nfunction makeActionInputTypeName(action: OperationSpecification) {\n if (!action.name || !action.schema) return;\n const pascalCaseActionName = pascalCase(action.name);\n return `${pascalCaseActionName}Input`;\n}\n\nfunction makeActionTypeName(action: OperationSpecification) {\n if (!action.name || !action.schema) return;\n return `${pascalCase(action.name)}Action`;\n}\n\nfunction makeActionInputSchemaImports(args: DocumentModelModuleFileMakerArgs) {\n return args.module.operations\n .map(makeActionInputSchemaName)\n .filter(Boolean)\n .join(\",\\n\");\n}\n\nfunction makeActionInputTypeImports(args: DocumentModelModuleFileMakerArgs) {\n return args.module.operations\n .map(makeActionInputTypeName)\n .filter(Boolean)\n .join(\",\\n\");\n}\n\nfunction makeActionTypeImports(args: DocumentModelModuleFileMakerArgs) {\n return args.module.operations.map(makeActionTypeName).join(\",\\n\");\n}\n\nfunction makeActionCreatorWithInput(operation: OperationSpecification) {\n if (!operation.name || !operation.schema) return;\n const camelCaseActionName = camelCase(operation.name);\n const constantCaseActionName = constantCase(operation.name);\n const actionTypeName = makeActionTypeName(operation);\n const inputSchemaName = makeActionInputSchemaName(operation)!;\n const inputTypeName = makeActionInputTypeName(operation)!;\n const hasAttachment = operationHasAttachment(operation);\n const isEmptyInput = operationHasEmptyInput(operation);\n const inputArg = isEmptyInput\n ? 
`input: ${inputTypeName} = {}`\n : `input: ${inputTypeName}`;\n const argsArray = [inputArg];\n if (hasAttachment) {\n argsArray.push(`attachments: AttachmentInput[]`);\n }\n const args = argsArray.join(\", \");\n\n return ts`\n export const ${camelCaseActionName} = (${args}) =>\n createAction<${actionTypeName}>(\n \"${constantCaseActionName}\",\n {...input},\n ${hasAttachment ? \"attachments\" : \"undefined\"},\n ${inputSchemaName},\n \"${operation.scope}\"\n );`.raw;\n}\n\nfunction makeActionCreatorWithoutInput(operation: OperationSpecification) {\n if (!operation.name || !operation.schema) return;\n const camelCaseActionName = camelCase(operation.name);\n const constantCaseActionName = constantCase(operation.name);\n const actionTypeName = makeActionTypeName(operation);\n return ts`\n export const ${camelCaseActionName} = () =>\n createAction<${actionTypeName}>(\"${constantCaseActionName}\");`.raw;\n}\n\nfunction makeCreatorsForActionsWithInput(\n args: DocumentModelModuleFileMakerArgs,\n) {\n const actionsWithInput = args.module.operations.filter((a) =>\n isTruthy(a.schema),\n );\n return actionsWithInput.map(makeActionCreatorWithInput).join(\"\\n\\n\");\n}\n\nfunction makeActionCreatorsWithoutInput(\n args: DocumentModelModuleFileMakerArgs,\n) {\n const actionsWithoutInput = args.module.operations.filter(\n (a) => !isTruthy(a.schema),\n );\n return actionsWithoutInput.map(makeActionCreatorWithoutInput).join(\"\\n\\n\");\n}\n\nexport const documentModelOperationsModuleCreatorsFileTemplate = (\n v: DocumentModelModuleFileMakerArgs,\n) =>\n ts`\n/**\n * WARNING: DO NOT EDIT\n * This file is auto-generated and updated by codegen\n */\nimport { ${makeDocumentModelTypeImports(v)} } from \"document-model\";\nimport {\n${makeActionInputSchemaImports(v)}\n} from '../schema/zod.js';\nimport type {\n${makeActionInputTypeImports(v)}\n} from '../types.js';\nimport type {\n${makeActionTypeImports(v)}\n} from 
'./actions.js';\n\n${makeCreatorsForActionsWithInput(v)}\n\n${makeActionCreatorsWithoutInput(v)}\n`.raw;\n","import type { DocumentModelModuleFileMakerArgs } from \"@powerhousedao/codegen\";\nimport type {\n OperationErrorSpecification,\n OperationSpecification,\n} from \"@powerhousedao/shared\";\nimport { ts } from \"@tmpl/core\";\nimport { pascalCase } from \"change-case\";\nimport { flatMap, prop } from \"remeda\";\n\nfunction getErrorName(error: OperationErrorSpecification) {\n if (!error.name) return;\n const pascalCaseErrorName = pascalCase(error.name);\n return pascalCaseErrorName;\n}\n\nfunction getErrorNames(errors: OperationErrorSpecification[]) {\n return errors.map(getErrorName).filter((name) => name !== undefined);\n}\nfunction getErrorCodeType(errors: OperationErrorSpecification[]) {\n const errorNames = getErrorNames(errors)\n .map((name) => `\"${name}\"`)\n .join(\" |\\n\");\n\n return ts`export type ErrorCode = ${errorNames};`.raw;\n}\n\nfunction errorClassTemplate(error: OperationErrorSpecification) {\n const errorName = getErrorName(error);\n if (!errorName) return;\n\n return ts`\n export class ${errorName} extends Error implements ReducerError {\n errorCode = \"${errorName}\" as ErrorCode;\n constructor(message = \"${errorName}\") {\n super(message);\n }\n }\n `.raw;\n}\n\nfunction getErrorClassImplementations(errors: OperationErrorSpecification[]) {\n return errors\n .map((error) => errorClassTemplate(error))\n .filter(Boolean)\n .join(\"\\n\\n\");\n}\n\nfunction getErrorsImplementations(args: DocumentModelModuleFileMakerArgs) {\n const errors = flatMap(args.module.operations, (o) => prop(o, \"errors\"));\n if (!errors.length) return \"\";\n\n const deduplicatedErrors = errors.reduce((acc, error) => {\n if (!acc.some((e) => getErrorName(e) === getErrorName(error))) {\n acc.push(error);\n }\n return acc;\n }, new Array<OperationErrorSpecification>());\n\n return ts`\n ${getErrorCodeType(deduplicatedErrors)}\n\n export interface ReducerError {\n 
errorCode: ErrorCode;\n }\n\n ${getErrorClassImplementations(deduplicatedErrors)}\n `.raw;\n}\n\nfunction getActionErrorsExport(operation: OperationSpecification) {\n if (!operation.name) return;\n const errors = operation.errors;\n if (errors.length === 0) return;\n const pascalCaseActionName = pascalCase(operation.name);\n const errorNames = getErrorNames(errors).filter(Boolean).join(\",\\n\");\n return ts`\n ${pascalCaseActionName}: { ${errorNames} }\n `.raw;\n}\n\nfunction getErrorsExport(args: DocumentModelModuleFileMakerArgs) {\n const errorsForEachAction = args.module.operations\n .map(getActionErrorsExport)\n .filter(Boolean)\n .join(\",\\n\");\n\n return ts`\n export const errors = { ${errorsForEachAction} };\n `.raw;\n}\n\nexport const documentModelOperationsModuleErrorFileTemplate = (\n v: DocumentModelModuleFileMakerArgs,\n) =>\n ts`\n ${getErrorsImplementations(v)}\n ${getErrorsExport(v)}\n`.raw;\n","import type { DocumentModelModuleFileMakerArgs } from \"@powerhousedao/codegen\";\nimport type {\n ModuleSpecification,\n OperationSpecification,\n} from \"@powerhousedao/shared\";\nimport { actions } from \"@powerhousedao/shared/document-model\";\nimport { ts } from \"@tmpl/core\";\nimport { camelCase, pascalCase } from \"change-case\";\nimport { getActionTypeName } from \"name-builders\";\n\nfunction getActionTypeNames(actions: OperationSpecification[]) {\n return actions.map(getActionTypeName);\n}\n\nfunction getActionTypeImports(args: DocumentModelModuleFileMakerArgs) {\n const actionTypeNames = getActionTypeNames(args.module.operations);\n return actionTypeNames.join(\",\\n\");\n}\n\nfunction getOperationsInterfaceName(\n pascalCaseDocumentType: string,\n module: ModuleSpecification,\n) {\n const pascalCaseModuleName = pascalCase(module.name);\n return `${pascalCaseDocumentType}${pascalCaseModuleName}Operations`;\n}\n\nfunction getActionOperationFieldName(action: OperationSpecification) {\n if (!action.name) return;\n const camelCaseActionName = 
camelCase(action.name);\n return `${camelCaseActionName}Operation`;\n}\n\nfunction getActionOperationStateTypeName(\n action: OperationSpecification,\n pascalCaseDocumentType: string,\n) {\n if (!action.scope) return `${pascalCaseDocumentType}State`;\n const pascalCaseStateName = pascalCase(action.scope);\n return `${pascalCaseDocumentType}${pascalCaseStateName}State`;\n}\n\nfunction getActionOperationStateTypeImports(\n args: DocumentModelModuleFileMakerArgs,\n) {\n const stateTypeNames = args.module.operations.map((action) =>\n getActionOperationStateTypeName(action, args.pascalCaseDocumentType),\n );\n\n return Array.from(new Set(stateTypeNames)).join(\",\\n\");\n}\n\nfunction getActionOperationFunction(\n action: OperationSpecification,\n pascalCaseDocumentType: string,\n) {\n const actionOperationStateTypeName = getActionOperationStateTypeName(\n action,\n pascalCaseDocumentType,\n );\n const actionTypeName = getActionTypeName(action);\n return ts`\n (state: ${actionOperationStateTypeName}, action: ${actionTypeName}, dispatch?: SignalDispatch) => void\n`.raw;\n}\n\nfunction getOperationsInterfaceField(\n action: OperationSpecification,\n pascalCaseDocumentType: string,\n) {\n const actionOperationFieldName = getActionOperationFieldName(action);\n const actionOperationFunction = getActionOperationFunction(\n action,\n pascalCaseDocumentType,\n );\n return ts`\n ${actionOperationFieldName}: ${actionOperationFunction}\n `.raw;\n}\n\nfunction getOperationsInterfaceFields(args: DocumentModelModuleFileMakerArgs) {\n return args.module.operations\n .map((action) =>\n getOperationsInterfaceField(action, args.pascalCaseDocumentType),\n )\n .join(\",\");\n}\n\nexport const documentModelOperationsModuleOperationsFileTemplate = (\n v: DocumentModelModuleFileMakerArgs,\n) =>\n ts`\n/**\n * WARNING: DO NOT EDIT\n * This file is auto-generated and updated by codegen\n */\nimport { type SignalDispatch } from 'document-model';\nimport type {\n ${getActionTypeImports(v)}\n} 
from './actions.js';\nimport type {\n ${getActionOperationStateTypeImports(v)}\n} from \"../types.js\";\n\nexport interface ${getOperationsInterfaceName(\n v.pascalCaseDocumentType,\n v.module,\n )} {\n ${getOperationsInterfaceFields(v)}\n }\n`.raw;\n","import { ts } from \"@tmpl/core\";\nimport type { DocumentModelFileMakerArgs } from \"file-builders\";\n\nexport const documentModelPhFactoriesFileTemplate = (\n v: DocumentModelFileMakerArgs,\n) =>\n ts`\n/**\n * WARNING: DO NOT EDIT\n * This file is auto-generated and updated by codegen\n * Factory methods for creating ${v.phDocumentTypeName} instances\n */\nimport type {\n PHAuthState,\n PHDocumentState,\n PHBaseState,\n} from \"document-model\";\nimport {\n createBaseState,\n defaultBaseState,\n} from \"document-model\";\nimport type {\n ${v.phDocumentTypeName},\n ${v.globalStateName},\n ${v.localStateName},\n ${v.phStateName},\n} from \"./types.js\";\nimport { utils } from \"./utils.js\";\n\nexport function defaultGlobalState(): ${v.globalStateName} {\n return ${v.initialGlobalState};\n}\n\nexport function defaultLocalState(): ${v.localStateName} {\n return ${v.initialLocalState};\n}\n\nexport function defaultPHState(): ${v.phStateName} {\n return {\n ...defaultBaseState(),\n global: defaultGlobalState(),\n local: defaultLocalState(),\n };\n}\n\nexport function createGlobalState(\n state?: Partial<${v.globalStateName}>,\n): ${v.globalStateName} {\n return {\n ...defaultGlobalState(),\n ...(state || {}),\n };\n}\n\nexport function createLocalState(\n state?: Partial<${v.localStateName}>,\n): ${v.localStateName} {\n return {\n ...defaultLocalState(),\n ...(state || {}),\n } as ${v.localStateName};\n}\n\nexport function createState(\n baseState?: Partial<PHBaseState>,\n globalState?: Partial<${v.globalStateName}>,\n localState?: Partial<${v.localStateName}>,\n): ${v.phStateName} {\n return {\n ...createBaseState(baseState?.auth, baseState?.document),\n global: createGlobalState(globalState),\n local: 
createLocalState(localState),\n };\n}\n\n/**\n * Creates a ${v.phDocumentTypeName} with custom global and local state\n * This properly handles the PHBaseState requirements while allowing\n * document-specific state to be set.\n */\nexport function create${v.phDocumentTypeName}(\n state?: Partial<{\n auth?: Partial<PHAuthState>;\n document?: Partial<PHDocumentState>;\n global?: Partial<${v.globalStateName}>;\n local?: Partial<${v.localStateName}>;\n }>,\n): ${v.phDocumentTypeName} {\n const document = utils.createDocument(\n state ? createState(\n createBaseState(state.auth, state.document),\n state.global,\n state.local,\n ) : undefined\n );\n\n return document;\n}\n`.raw;\n","import type {\n ModuleSpecification,\n OperationSpecification,\n} from \"@powerhousedao/shared\";\nimport { ts } from \"@tmpl/core\";\nimport { camelCase, constantCase, kebabCase, pascalCase } from \"change-case\";\nimport type { DocumentModelFileMakerArgs } from \"file-builders\";\n\nfunction makePascalCaseOperationName(operation: OperationSpecification) {\n if (!operation.name) {\n throw new Error(\"Operation is missing name\");\n }\n return pascalCase(operation.name);\n}\n\nfunction makeCamelCaseOperationName(operation: OperationSpecification) {\n if (!operation.name) {\n throw new Error(\"Operation is missing name\");\n }\n return camelCase(operation.name);\n}\n\nfunction makeConstantCaseOperationName(operation: OperationSpecification) {\n if (!operation.name) {\n throw new Error(\"Operation is missing name\");\n }\n return constantCase(operation.name);\n}\n\nfunction makeOperationInputSchema(operation: OperationSpecification) {\n const pascalCaseOperationName = makePascalCaseOperationName(operation);\n return `${pascalCaseOperationName}InputSchema`;\n}\n\nfunction makeOperationInputSchemaImports(modules: ModuleSpecification[]) {\n const moduleOperationInputSchemas = modules\n .flatMap((module) => module.operations.map(makeOperationInputSchema))\n .join(\",\\n\");\n return `import { 
${moduleOperationInputSchemas} } from \"./schema/zod.js\";`;\n}\n\nfunction makeModuleOperationsImport(\n module: ModuleSpecification,\n camelCaseDocumentType: string,\n) {\n const pascalCaseModuleName = pascalCase(module.name);\n const kebabCaseModuleName = kebabCase(module.name);\n return `import { ${camelCaseDocumentType}${pascalCaseModuleName}Operations } from \"../src/reducers/${kebabCaseModuleName}.js\";`;\n}\n\nfunction makeModulesOperationsImports(\n modules: ModuleSpecification[],\n camelCaseDocumentType: string,\n) {\n return modules\n .map((module) => makeModuleOperationsImport(module, camelCaseDocumentType))\n .join(\"\\n\");\n}\n\nfunction makeOperationInputSchemaInvocation(operation: OperationSpecification) {\n const operationInputSchema = makeOperationInputSchema(operation);\n const constantCaseOperationName = makeConstantCaseOperationName(operation);\n if (operation.schema === null) {\n return ts`\n if (Object.keys(action.input).length > 0) throw new Error(\"Expected empty input for action ${constantCaseOperationName}\");\n`.raw;\n }\n return ts`${operationInputSchema}().parse(action.input);`.raw;\n}\n\nfunction makeOperationsObjectName(\n module: ModuleSpecification,\n camelCaseDocumentType: string,\n) {\n const pascalCaseModuleName = pascalCase(module.name);\n return `${camelCaseDocumentType}${pascalCaseModuleName}Operations`;\n}\n\nfunction makeOperationName(operation: OperationSpecification) {\n const camelCaseOperationName = makeCamelCaseOperationName(operation);\n return `${camelCaseOperationName}Operation`;\n}\n\nfunction makeOperationInvocation(\n module: ModuleSpecification,\n operation: OperationSpecification,\n camelCaseDocumentType: string,\n) {\n const operationsObjectName = makeOperationsObjectName(\n module,\n camelCaseDocumentType,\n );\n const operationName = makeOperationName(operation);\n\n return ts`\n ${operationsObjectName}.${operationName}((state as any)[action.scope], action as any, dispatch);\n `.raw;\n}\n\nfunction 
makeModuleOperationCaseStatement(\n module: ModuleSpecification,\n camelCaseDocumentType: string,\n) {\n return module.operations.map(\n (operation) =>\n ts`\n case \"${makeConstantCaseOperationName(operation)}\": {\n ${makeOperationInputSchemaInvocation(operation)}\n ${makeOperationInvocation(module, operation, camelCaseDocumentType)}\n break;\n }\n `.raw,\n );\n}\n\nfunction makeModuleOperationsCaseStatements(\n modules: ModuleSpecification[],\n camelCaseDocumentType: string,\n) {\n return modules\n .map((module) =>\n makeModuleOperationCaseStatement(module, camelCaseDocumentType).join(\n \"\\n\",\n ),\n )\n .join(\"\\n\");\n}\n\nexport const documentModelGenReducerFileTemplate = (\n v: DocumentModelFileMakerArgs,\n) =>\n ts`\n/* eslint-disable @typescript-eslint/no-unsafe-member-access */\n/* eslint-disable @typescript-eslint/no-unsafe-argument */\nimport type { Reducer, StateReducer } from \"document-model\";\nimport { isDocumentAction, createReducer } from \"document-model\";\nimport type { ${v.phStateName} } from \"${v.versionImportPath}\";\n\n${makeModulesOperationsImports(v.specification.modules, v.camelCaseDocumentType)}\n\n${makeOperationInputSchemaImports(v.specification.modules)}\n\nconst stateReducer: StateReducer<${v.phStateName}> =\n (state, action, dispatch) => {\n if (isDocumentAction(action)) {\n return state;\n }\n switch (action.type) {\n ${makeModuleOperationsCaseStatements(v.specification.modules, v.camelCaseDocumentType)}\n default:\n return state;\n }\n }\n\nexport const reducer: Reducer<${v.phStateName}> = createReducer(stateReducer);\n`.raw;\n","import { ts } from \"@tmpl/core\";\n\nexport const documentModelSchemaIndexTemplate = ts`\n/**\n * WARNING: DO NOT EDIT\n * This file is auto-generated and updated by codegen\n */\nexport * from \"./types.js\";\nexport * from \"./zod.js\";\n`.raw;\n","import { ts } from \"@tmpl/core\";\nimport type { DocumentModelFileMakerArgs } from \"file-builders\";\n\nfunction buildEmptyLocalStateType(\n 
hasLocalSchema: boolean,\n localStateName: string,\n) {\n if (hasLocalSchema) return \"\";\n\n return `type ${localStateName} = Record<PropertyKey, never>;`;\n}\n\nfunction buildLocalStateTypeImport(\n hasLocalSchema: boolean,\n localStateName: string,\n) {\n if (!hasLocalSchema) return \"\";\n return localStateName;\n}\nexport const documentModelGenTypesTemplate = (v: DocumentModelFileMakerArgs) =>\n ts`\n/**\n * WARNING: DO NOT EDIT\n * This file is auto-generated and updated by codegen\n */\nimport type { PHDocument, PHBaseState } from 'document-model';\nimport type { ${v.actionTypeName} } from './actions.js';\nimport type {\n ${v.stateName} as ${v.globalStateName},\n ${buildLocalStateTypeImport(v.hasLocalSchema, v.localStateName)}\n} from './schema/types.js';\n\n${buildEmptyLocalStateType(v.hasLocalSchema, v.localStateName)}\n\ntype ${v.phStateName} = PHBaseState & {\n global: ${v.globalStateName};\n local: ${v.localStateName};\n};\ntype ${v.phDocumentTypeName} = PHDocument<${v.phStateName}>;\n\nexport * from './schema/types.js';\n\nexport type { \n ${v.globalStateName}, \n ${v.localStateName},\n ${v.phStateName}, \n ${v.actionTypeName},\n ${v.phDocumentTypeName},\n};\n`.raw;\n","import { ts } from \"@tmpl/core\";\nimport type { DocumentModelFileMakerArgs } from \"file-builders\";\n\nexport const documentModelGenUtilsTemplate = (v: DocumentModelFileMakerArgs) =>\n ts`\n/**\n * WARNING: DO NOT EDIT\n * This file is auto-generated and updated by codegen\n */\nimport type {\n DocumentModelUtils,\n} from \"document-model\";\nimport { \n baseCreateDocument,\n baseSaveToFileHandle,\n baseLoadFromInput,\n defaultBaseState,\n generateId,\n } from \"document-model\";\nimport { reducer } from './reducer.js';\nimport { ${v.documentTypeVariableName} } from \"./document-type.js\";\nimport {\n ${v.assertIsPhDocumentOfTypeFunctionName},\n ${v.assertIsPhStateOfTypeFunctionName},\n ${v.isPhDocumentOfTypeFunctionName},\n ${v.isPhStateOfTypeFunctionName},\n} from 
\"./document-schema.js\";\nimport type { ${v.globalStateName}, ${v.localStateName}, ${v.phStateName} } from './types.js';\n\nexport const initialGlobalState: ${v.globalStateName} = ${v.initialGlobalState};\nexport const initialLocalState: ${v.localStateName} = ${v.initialLocalState};\n\nexport const utils: DocumentModelUtils<${v.phStateName}> = {\n fileExtension: \"${v.documentModelState.extension}\",\n createState(state) {\n return { ...defaultBaseState(), global: { ...initialGlobalState, ...state?.global }, local: { ...initialLocalState, ...state?.local } };\n },\n createDocument(state) {\n const document = baseCreateDocument(\n utils.createState,\n state\n );\n\n document.header.documentType = ${v.documentTypeVariableName};\n\n // for backwards compatibility, but this is NOT a valid signed document id\n document.header.id = generateId();\n\n return document;\n },\n saveToFileHandle(document, input) {\n return baseSaveToFileHandle(document, input);\n },\n loadFromInput(input) {\n return baseLoadFromInput(input, reducer);\n },\n isStateOfType(state) {\n return ${v.isPhStateOfTypeFunctionName}(state);\n },\n assertIsStateOfType(state) {\n return ${v.assertIsPhStateOfTypeFunctionName}(state);\n },\n isDocumentOfType(document) {\n return ${v.isPhDocumentOfTypeFunctionName}(document);\n },\n assertIsDocumentOfType(document) {\n return ${v.assertIsPhDocumentOfTypeFunctionName}(document);\n },\n};\n`.raw;\n","import { ts } from \"@tmpl/core\";\nimport type { DocumentModelFileMakerArgs } from \"file-builders\";\n\nexport const documentModelHooksFileTemplate = (v: DocumentModelFileMakerArgs) =>\n ts`\n/**\n * WARNING: DO NOT EDIT\n * This file is auto-generated and updated by codegen\n */\nimport type { DocumentDispatch } from \"@powerhousedao/reactor-browser\";\nimport {\n useDocumentById,\n useDocumentsInSelectedDrive,\n useDocumentsInSelectedFolder,\n useSelectedDocument,\n} from \"@powerhousedao/reactor-browser\";\nimport type {\n ${v.actionTypeName},\n 
${v.phDocumentTypeName},\n} from \"${v.versionImportPath}\";\nimport { \n ${v.assertIsPhDocumentOfTypeFunctionName},\n ${v.isPhDocumentOfTypeFunctionName} \n} from \"./gen/document-schema.js\";\n\n/** Hook to get a ${v.pascalCaseDocumentType} document by its id */\nexport function ${v.useByIdHookName}(\n documentId: string | null | undefined,\n):\n | [${v.phDocumentTypeName}, DocumentDispatch<${v.actionTypeName}>]\n | [undefined, undefined] {\n const [document, dispatch] = useDocumentById(documentId);\n if (!${v.isPhDocumentOfTypeFunctionName}(document)) return [undefined, undefined];\n return [document, dispatch];\n}\n\n/** Hook to get the selected ${v.pascalCaseDocumentType} document */\nexport function ${v.useSelectedHookName}():\n | [${v.phDocumentTypeName}, DocumentDispatch<${v.actionTypeName}>] {\n const [document, dispatch] = useSelectedDocument();\n\n ${v.assertIsPhDocumentOfTypeFunctionName}(document);\n return [document, dispatch] as const;\n}\n\n/** Hook to get all ${v.pascalCaseDocumentType} documents in the selected drive */\nexport function ${v.useInSelectedDriveHookName}() {\n const documentsInSelectedDrive = useDocumentsInSelectedDrive();\n return documentsInSelectedDrive?.filter(${v.isPhDocumentOfTypeFunctionName});\n}\n\n/** Hook to get all ${v.pascalCaseDocumentType} documents in the selected folder */\nexport function ${v.useInSelectedFolderHookName}() {\n const documentsInSelectedFolder = useDocumentsInSelectedFolder();\n return documentsInSelectedFolder?.filter(${v.isPhDocumentOfTypeFunctionName});\n}\n`.raw;\n","import { ts } from \"@tmpl/core\";\n\nexport const documentModelIndexTemplate = ts`\n/**\n * WARNING: DO NOT EDIT\n * This file is auto-generated and updated by codegen\n */\nexport * from \"./gen/index.js\";\nexport * from \"./src/index.js\";\nexport * from \"./hooks.js\";\nexport * from \"./module.js\";\nexport { actions } from \"./actions.js\";\nexport { utils } from \"./utils.js\";\n`.raw;\n","import { ts } from 
\"@tmpl/core\";\n\ntype DocumentModelModuleFileTemplateArgs = {\n phStateName: string;\n pascalCaseDocumentType: string;\n version: number;\n};\nexport function documentModelModuleFileTemplate({\n phStateName,\n pascalCaseDocumentType,\n version,\n}: DocumentModelModuleFileTemplateArgs) {\n const template = ts`\n/**\n * WARNING: DO NOT EDIT\n * This file is auto-generated and updated by codegen\n */\n import type { DocumentModelModule } from \"document-model\";\n import { createState, defaultBaseState } from \"document-model\";\n import type { ${phStateName} } from \"./gen/types.js\";\n import { documentModel } from \"./gen/document-model.js\";\n import { reducer } from \"./gen/reducer.js\";\n import { actions } from \"./actions.js\";\n import { utils } from \"./utils.js\";\n\n /** Document model module for the ${pascalCaseDocumentType} document type */\n export const ${pascalCaseDocumentType}: DocumentModelModule<${phStateName}> = {\n version: ${version},\n reducer,\n actions,\n utils,\n documentModel: createState(defaultBaseState(), documentModel),\n };\n`;\n\n return template.raw;\n}\n","import { ts } from \"@tmpl/core\";\n\nexport const documentModelSrcIndexFileTemplate = ts`\n/**\n * WARNING: DO NOT EDIT\n * This file is auto-generated and updated by codegen\n */\nexport * from \"./utils.js\";\n`.raw;\n","import { ts } from \"@tmpl/core\";\n\nexport const documentModelSrcUtilsTemplate = ts`\nexport {};\n`.raw;\n","import { ts } from \"@tmpl/core\";\nimport type { DocumentModelFileMakerArgs } from \"file-builders\";\n\nexport const documentModelTestFileTemplate = (v: DocumentModelFileMakerArgs) =>\n ts`\n/**\n * This is a scaffold file meant for customization:\n * - change it by adding new tests or modifying the existing ones\n */\n/**\n * This is a scaffold file meant for customization:\n * - change it by adding new tests or modifying the existing ones\n */\n\nimport { describe, it, expect } from \"vitest\";\nimport {\n utils,\n initialGlobalState,\n 
initialLocalState,\n ${v.documentTypeVariableName},\n ${v.isPhDocumentOfTypeFunctionName},\n ${v.assertIsPhDocumentOfTypeFunctionName},\n ${v.isPhStateOfTypeFunctionName},\n ${v.assertIsPhStateOfTypeFunctionName},\n} from \"${v.versionImportPath}\";\nimport { ZodError } from \"zod\";\n\ndescribe(\"${v.pascalCaseDocumentType} Document Model\", () => {\n it(\"should create a new ${v.pascalCaseDocumentType} document\", () => {\n const document = utils.createDocument();\n\n expect(document).toBeDefined();\n expect(document.header.documentType).toBe(${v.documentTypeVariableName});\n });\n\n it(\"should create a new ${v.pascalCaseDocumentType} document with a valid initial state\", () => {\n const document = utils.createDocument();\n expect(document.state.global).toStrictEqual(initialGlobalState);\n expect(document.state.local).toStrictEqual(initialLocalState);\n expect(${v.isPhDocumentOfTypeFunctionName}(document)).toBe(true);\n expect(${v.isPhStateOfTypeFunctionName}(document.state)).toBe(true);\n });\n it(\"should reject a document that is not a ${v.pascalCaseDocumentType} document\", () => {\n const wrongDocumentType = utils.createDocument();\n wrongDocumentType.header.documentType = \"the-wrong-thing-1234\";\n try {\n expect(${v.assertIsPhDocumentOfTypeFunctionName}(wrongDocumentType)).toThrow();\n expect(${v.isPhDocumentOfTypeFunctionName}(wrongDocumentType)).toBe(false);\n } catch (error) {\n expect(error).toBeInstanceOf(ZodError);\n }\n });\n const wrongState = utils.createDocument();\n // @ts-expect-error - we are testing the error case\n wrongState.state.global = {\n ...{ notWhat: \"you want\" },\n };\n try {\n expect(${v.isPhStateOfTypeFunctionName}(wrongState.state)).toBe(false);\n expect(${v.assertIsPhStateOfTypeFunctionName}(wrongState.state)).toThrow();\n expect(${v.isPhDocumentOfTypeFunctionName}(wrongState)).toBe(false);\n expect(${v.assertIsPhDocumentOfTypeFunctionName}(wrongState)).toThrow();\n } catch (error) {\n 
expect(error).toBeInstanceOf(ZodError);\n }\n\n const wrongInitialState = utils.createDocument();\n // @ts-expect-error - we are testing the error case\n wrongInitialState.initialState.global = {\n ...{ notWhat: \"you want\" },\n };\n try {\n expect(${v.isPhStateOfTypeFunctionName}(wrongInitialState.state)).toBe(false);\n expect(${v.assertIsPhStateOfTypeFunctionName}(wrongInitialState.state)).toThrow();\n expect(${v.isPhDocumentOfTypeFunctionName}(wrongInitialState)).toBe(false);\n expect(${v.assertIsPhDocumentOfTypeFunctionName}(wrongInitialState)).toThrow();\n } catch (error) {\n expect(error).toBeInstanceOf(ZodError);\n }\n\n const missingIdInHeader = utils.createDocument();\n // @ts-expect-error - we are testing the error case\n delete missingIdInHeader.header.id;\n try {\n expect(${v.isPhDocumentOfTypeFunctionName}(missingIdInHeader)).toBe(false);\n expect(${v.assertIsPhDocumentOfTypeFunctionName}(missingIdInHeader)).toThrow();\n } catch (error) {\n expect(error).toBeInstanceOf(ZodError);\n }\n\n const missingNameInHeader = utils.createDocument();\n // @ts-expect-error - we are testing the error case\n delete missingNameInHeader.header.name;\n try {\n expect(${v.isPhDocumentOfTypeFunctionName}(missingNameInHeader)).toBe(false);\n expect(${v.assertIsPhDocumentOfTypeFunctionName}(missingNameInHeader)).toThrow();\n } catch (error) {\n expect(error).toBeInstanceOf(ZodError);\n }\n\n const missingCreatedAtUtcIsoInHeader = utils.createDocument();\n // @ts-expect-error - we are testing the error case\n delete missingCreatedAtUtcIsoInHeader.header.createdAtUtcIso;\n try {\n expect(${v.isPhDocumentOfTypeFunctionName}(missingCreatedAtUtcIsoInHeader)).toBe(false);\n expect(${v.assertIsPhDocumentOfTypeFunctionName}(missingCreatedAtUtcIsoInHeader)).toThrow();\n } catch (error) {\n expect(error).toBeInstanceOf(ZodError);\n }\n\n const missingLastModifiedAtUtcIsoInHeader = utils.createDocument();\n // @ts-expect-error - we are testing the error case\n delete 
missingLastModifiedAtUtcIsoInHeader.header.lastModifiedAtUtcIso;\n try {\n expect(${v.isPhDocumentOfTypeFunctionName}(missingLastModifiedAtUtcIsoInHeader)).toBe(false);\n expect(\n ${v.assertIsPhDocumentOfTypeFunctionName}(missingLastModifiedAtUtcIsoInHeader),\n ).toThrow();\n } catch (error) {\n expect(error).toBeInstanceOf(ZodError);\n }\n});\n`.raw;\n","import type {\n ModuleSpecification,\n OperationSpecification,\n} from \"@powerhousedao/shared\";\nimport { ts } from \"@tmpl/core\";\nimport { camelCase, constantCase, pascalCase } from \"change-case\";\nimport type { DocumentModelModuleFileMakerArgs } from \"file-builders\";\nimport { filter, isString, map, pipe, prop } from \"remeda\";\n\nfunction makeModuleOperationsTypeName(module: ModuleSpecification) {\n const pascalCaseModuleName = pascalCase(module.name);\n return `${pascalCaseModuleName}Operations`;\n}\n\nfunction makeCamelCaseOperationNamesForImport(\n operations: OperationSpecification[],\n) {\n return pipe(\n operations,\n map(prop(\"name\")),\n filter(isString),\n map((n) => camelCase(n)),\n );\n}\n\nfunction makeOperationInputSchemasForImport(\n operations: OperationSpecification[],\n) {\n return pipe(\n operations,\n map(prop(\"name\")),\n filter(isString),\n map((n) => `${pascalCase(n)}InputSchema`),\n );\n}\n\nexport function makeTestCaseForOperation(\n operation: OperationSpecification,\n isPhDocumentOfTypeFunctionName: string,\n) {\n if (operation.name === null) {\n throw new Error(`Operation is missing name.`);\n }\n const camelCaseActionName = camelCase(operation.name);\n const pascalCaseActionName = pascalCase(operation.name);\n const constantCaseActionName = constantCase(operation.name);\n const actionInputSchemaName = `${pascalCaseActionName}InputSchema`;\n const scope = operation.scope;\n return ts`\n it('should handle ${camelCaseActionName} operation', () => {\n const document = utils.createDocument();\n const input = generateMock(\n ${actionInputSchemaName}(),\n );\n\n const 
updatedDocument = reducer(\n document,\n ${camelCaseActionName}(input),\n );\n\n expect(${isPhDocumentOfTypeFunctionName}(updatedDocument)).toBe(true);\n expect(updatedDocument.operations.${scope}).toHaveLength(1);\n expect(updatedDocument.operations.${scope}[0].action.type).toBe(\n \"${constantCaseActionName}\",\n );\n expect(updatedDocument.operations.${scope}[0].action.input).toStrictEqual(input);\n expect(updatedDocument.operations.${scope}[0].index).toEqual(0);\n });\n `.raw;\n}\n\nexport function makeOperationImportNames(v: DocumentModelModuleFileMakerArgs) {\n const operationNames = makeCamelCaseOperationNamesForImport(\n v.module.operations,\n );\n const inputSchemaNames = makeOperationInputSchemasForImport(\n v.module.operations,\n );\n const importNames = [\n \"reducer\",\n \"utils\",\n v.isPhDocumentOfTypeFunctionName,\n ...operationNames,\n ...inputSchemaNames,\n ];\n return importNames;\n}\n\nexport function makeOperationsImports(v: DocumentModelModuleFileMakerArgs) {\n const importNames = makeOperationImportNames(v).join(\"\\n\");\n return ts`\n import {\n ${importNames}\n } from \"${v.versionImportPath}\";\n `.raw;\n}\n\nfunction makeTestCasesForOperations(\n operations: OperationSpecification[],\n isPhDocumentOfTypeFunctionName: string,\n) {\n return operations\n .map((operation) =>\n makeTestCaseForOperation(operation, isPhDocumentOfTypeFunctionName),\n )\n .join(\"\\n\\n\");\n}\nexport const documentModelOperationsModuleTestFileTemplate = (\n v: DocumentModelModuleFileMakerArgs,\n) =>\n ts`\n/**\n * This is a scaffold file meant for customization:\n * - change it by adding new tests or modifying the existing ones\n */\n\nimport { describe, it, expect } from 'vitest';\nimport { generateMock } from '@powerhousedao/common';\nimport {\n reducer,\n utils,\n ${v.isPhDocumentOfTypeFunctionName},\n ${makeCamelCaseOperationNamesForImport(v.module.operations)},\n ${makeOperationInputSchemasForImport(v.module.operations)},\n} from 
\"${v.versionImportPath}\";\n\ndescribe(\"${makeModuleOperationsTypeName(v.module)}\", () => {\n ${makeTestCasesForOperations(v.module.operations, v.isPhDocumentOfTypeFunctionName)}\n});\n\n`.raw;\n","import { ts } from \"@tmpl/core\";\nimport type { DocumentModelFileMakerArgs } from \"file-builders\";\n\nexport const upgradeManifestTemplate = (v: DocumentModelFileMakerArgs) =>\n ts`\n/**\n * WARNING: DO NOT EDIT\n * This file is auto-generated and updated by codegen\n */\n import type { UpgradeManifest } from \"document-model\";\n import { latestVersion, supportedVersions } from \"./versions.js\";\n\n export const ${v.upgradeManifestName}: UpgradeManifest<typeof supportedVersions> = {\n documentType: \"${v.documentModelState.id}\",\n latestVersion,\n supportedVersions,\n upgrades: {},\n };\n `.raw;\n","import { ts } from \"@tmpl/core\";\nimport type { DocumentModelFileMakerArgs } from \"file-builders\";\n\nexport const upgradeTransitionTemplate = (v: DocumentModelFileMakerArgs) =>\n ts`\nimport type { Action, PHDocument, UpgradeTransition } from \"document-model\";\nimport type { ${v.phStateName} as StateV${v.version - 1} } from \"${v.documentModelImportPath}/v${v.version - 1}\";\nimport type { ${v.phStateName} as StateV${v.version} } from \"${v.documentModelImportPath}/v${v.version}\";\n\nfunction upgradeReducer(\n document: PHDocument<StateV${v.version - 1}>,\n action: Action,\n): PHDocument<StateV${v.version}> {\n return {\n ...document,\n };\n}\n\nexport const v${v.version}: UpgradeTransition = {\n toVersion: ${v.version},\n upgradeReducer,\n description: \"\",\n};\n`.raw;\n","import { ts } from \"@tmpl/core\";\nimport type { DocumentModelVariableNames } from \"file-builders\";\n\nexport const documentModelUtilsTemplate = ({\n phStateName,\n pascalCaseDocumentType,\n}: DocumentModelVariableNames) =>\n ts`\n/**\n * WARNING: DO NOT EDIT\n * This file is auto-generated and updated by codegen\n */\nimport type { DocumentModelUtils } from 
\"document-model\";\nimport type { ${phStateName} } from \"./gen/types.js\";\nimport { utils as genUtils } from \"./gen/utils.js\";\nimport * as customUtils from \"./src/utils.js\";\n\n/** Utils for the ${pascalCaseDocumentType} document model */\nexport const utils: DocumentModelUtils<${phStateName}> = { ...genUtils, ...customUtils };\n`.raw;\n","export function getDocumentType(documentTypes: string[]) {\n if (!documentTypes.length) return `\"*\"`;\n return documentTypes.map((type) => `\"${type}\"`).join(\", \");\n}\n","import { ts } from \"@tmpl/core\";\nimport { getDocumentType } from \"../utils.js\";\n\nexport const analyticsFactoryTemplate = (v: {\n pascalCaseName: string;\n camelCaseName: string;\n documentTypes: string[];\n}) =>\n ts`\nimport type { \n ProcessorApp,\n ProcessorFactoryBuilder,\n ProcessorRecord, \n IProcessorHostModule,\n} from \"@powerhousedao/reactor-browser\";\nimport { type PHDocumentHeader } from \"document-model\";\nimport { ${v.pascalCaseName} } from \"./processor.js\";\n\nexport const ${v.camelCaseName}FactoryBuilder: ProcessorFactoryBuilder = (module: IProcessorHostModule) => async (driveHeader: PHDocumentHeader, processorApp?: ProcessorApp) => {\n return [\n {\n processor: new ${v.pascalCaseName}(module.analyticsStore),\n filter: {\n branch: [\"main\"],\n documentId: [\"*\"],\n scope: [\"*\"],\n documentType: [${getDocumentType(v.documentTypes)}],\n },\n },\n ];\n}\n`.raw;\n","import { ts } from \"@tmpl/core\";\n\nexport const analyticsIndexTemplate = ts`\n/**\n * WARNING: DO NOT EDIT\n * This file is auto-generated and updated by codegen\n */\nexport * from \"./factory.js\";\nexport * from \"./processor.js\";\n`.raw;\n","import { ts } from \"@tmpl/core\";\n\nexport const analyticsProcessorTemplate = (v: { pascalCaseName: string }) =>\n ts`\nimport type { AnalyticsSeriesInput, AnalyticsPath, IAnalyticsStore } from \"@powerhousedao/analytics-engine-core\";\nimport type { OperationWithContext, IProcessor } from 
\"@powerhousedao/reactor-browser\";\n\nexport class ${v.pascalCaseName} implements IProcessor {\n private readonly NAMESPACE = \"${v.pascalCaseName}\";\n\n private readonly inputs: AnalyticsSeriesInput[] = [];\n\n constructor(private readonly analyticsStore: IAnalyticsStore) {\n //\n }\n\n onOperations(operations: OperationWithContext[]): Promise<void> {\n return Promise.resolve();\n }\n\n onDisconnect(): Promise<void> {\n return Promise.resolve();\n }\n\n private async clearSource(source: AnalyticsPath) {\n try {\n await this.analyticsStore.clearSeriesBySource(source, true);\n } catch (e) {\n console.error(e);\n }\n }\n}\n`.raw;\n","import { ts } from \"@tmpl/core\";\n\nexport const factoryBuildersTemplate = ts`\n/**\n * WARNING: DO NOT EDIT\n * This file is auto-generated and updated by codegen\n */\nimport type { ProcessorFactoryBuilder } from \"@powerhousedao/reactor\";\n\nexport const processorFactoryBuilders: ProcessorFactoryBuilder[] = [];\n`.raw;\n","import { ts } from \"@tmpl/core\";\n\nexport const processorsFactoryTemplate = ts`\n/**\n * WARNING: DO NOT EDIT\n * This file is auto-generated and updated by codegen\n */\nimport type {\n IProcessorHostModule,\n ProcessorRecord,\n} from \"@powerhousedao/reactor-browser\";\nimport type { PHDocumentHeader } from \"document-model\";\n\nexport const processorFactory = async (module: IProcessorHostModule) => {\n const { processorFactoryBuilders } =\n module.processorApp === \"connect\"\n ? 
await import(\"./connect.js\")\n : await import(\"./switchboard.js\");\n\n const factories = await Promise.all(\n processorFactoryBuilders.map(\n async (buildFactory) => await buildFactory(module),\n ),\n );\n\n // Return the inner function that will be called for each drive\n return async (driveHeader: PHDocumentHeader): Promise<ProcessorRecord[]> => {\n const processors: ProcessorRecord[] = [];\n\n // Call each cached factory with the driveHeader\n for (const factory of factories) {\n const factoryProcessors = await factory(driveHeader, module.processorApp);\n processors.push(...factoryProcessors);\n }\n\n return processors;\n };\n};\n`.raw;\n","import { ts } from \"@tmpl/core\";\n\nexport const processorsIndexTemplate = ts`\n/**\n * WARNING: DO NOT EDIT\n * This file is auto-generated and updated by codegen\n */\nexport { processorFactory } from \"./factory.js\";\n`.raw;\n","import { ts } from \"@tmpl/core\";\nimport { getDocumentType } from \"../utils.js\";\n\nexport const relationalDbFactoryTemplate = (v: {\n camelCaseName: string;\n pascalCaseName: string;\n documentTypes: string[];\n}) =>\n ts`\nimport type { \n IProcessorHostModule,\n ProcessorApp,\n ProcessorFactoryBuilder,\n ProcessorFilter,\n ProcessorRecord,\n } from \"@powerhousedao/reactor-browser\"\nimport type { PHDocumentHeader } from \"document-model\";\nimport { ${v.pascalCaseName} } from \"./processor.js\";\n\nexport const ${v.camelCaseName}FactoryBuilder: ProcessorFactoryBuilder = (module: IProcessorHostModule) => async (driveHeader: PHDocumentHeader, processorApp?: ProcessorApp) => {\n // Create a namespace for the processor and the provided drive id\n const namespace = ${v.pascalCaseName}.getNamespace(driveHeader.id);\n\n // Create a namespaced db for the processor\n const store = await module.relationalDb.createNamespace<${v.pascalCaseName}>(\n namespace,\n );\n\n // Create a filter for the processor\n const filter: ProcessorFilter = {\n branch: [\"main\"],\n documentId: [\"*\"],\n 
documentType: [${getDocumentType(v.documentTypes)}],\n scope: [\"global\"],\n };\n\n // Create the processor\n const processor = new ${v.pascalCaseName}(namespace, filter, store);\n return [\n {\n processor,\n filter,\n },\n ];\n}\n`.raw;\n","import { ts } from \"@tmpl/core\";\n\nexport const relationalDbIndexTemplate = ts`\n/**\n * WARNING: DO NOT EDIT\n * This file is auto-generated and updated by codegen\n */\nexport * from \"./factory.js\";\nexport * from \"./processor.js\";\n`.raw;\n","import { ts } from \"@tmpl/core\";\n\nexport const relationalDbMigrationsTemplate = () =>\n ts`\nimport type { IRelationalDb } from \"@powerhousedao/reactor-browser\"\n\nexport async function up(db: IRelationalDb<any>): Promise<void> {\n // Create table \n await db.schema\n .createTable(\"todo\")\n .addColumn(\"task\", \"varchar(255)\")\n .addColumn(\"status\", \"boolean\")\n .addPrimaryKeyConstraint(\"todo_pkey\", [\n \"task\"\n ])\n .ifNotExists()\n .execute();\n}\n\nexport async function down(db: IRelationalDb<any>): Promise<void> {\n // drop table\n await db.schema.dropTable(\"todo\").execute();\n}\n`.raw;\n","import { ts } from \"@tmpl/core\";\n\nconst defaultNamespaceComment =\n '// Default namespace: `${this.name}_${driveId.replaceAll(\"-\", \"_\")}`';\nexport const relationalDbProcessorTemplate = (v: { pascalCaseName: string }) =>\n ts`\nimport { RelationalDbProcessor } from \"@powerhousedao/reactor-browser\";\nimport type { OperationWithContext } from \"document-model\";\nimport { up } from \"./migrations.js\";\nimport type { DB } from \"./schema.js\";\n\nexport class ${v.pascalCaseName} extends RelationalDbProcessor<DB> {\n onOperations(operations: OperationWithContext[]): Promise<void> {\n return Promise.resolve();\n }\n\n onDisconnect(): Promise<void> {\n return Promise.resolve();\n }\n\n static override getNamespace(driveId: string): string {\n ${defaultNamespaceComment}\n return super.getNamespace(driveId);\n }\n\n override async initAndUpgrade(): Promise<void> {\n 
await up(this.relationalDb);\n }\n}\n`.raw;\n","import { ts } from \"@tmpl/core\";\n\nexport const relationalDbSchemaTemplate = () =>\n ts`\nexport interface Todo {\n status: boolean | null;\n task: string;\n}\n\nexport interface DB {\n todo: Todo;\n}\n`.raw;\n","import { ts } from \"@tmpl/core\";\n\nexport const subgraphIndexFileTemplate = (v: {\n pascalCaseName: string;\n kebabCaseName: string;\n}) =>\n ts`\nimport { BaseSubgraph } from \"@powerhousedao/reactor-api\";\nimport type { DocumentNode } from \"graphql\";\nimport { schema } from \"./schema.js\";\nimport { getResolvers } from \"./resolvers.js\";\n\nexport class ${v.pascalCaseName}Subgraph extends BaseSubgraph {\n name = \"${v.kebabCaseName}\";\n typeDefs: DocumentNode = schema;\n resolvers = getResolvers(this);\n additionalContextFields = {};\n async onSetup() {}\n async onDisconnect() {}\n}\n`.raw;\n","import { ts } from \"@tmpl/core\";\n\nexport const subgraphLibFileTemplate = () =>\n ts`\n/**\n * This is a scaffold file meant for customization.\n * Delete the file and run the code generator again to have it reset\n */\n`.raw;\n","import { ts } from \"@tmpl/core\";\n\nexport const customSubgraphSchemaTemplate = (v: {\n pascalCaseName: string;\n camelCaseName: string;\n}) =>\n ts`\nimport { gql } from \"graphql-tag\";\nimport type { DocumentNode } from \"graphql\";\n\nexport const schema: DocumentNode = gql\\`\n\"\"\"\n${v.pascalCaseName} Queries\n\"\"\"\ntype ${v.pascalCaseName}Queries {\n example(driveId: String!): String\n}\n\ntype Query {\n ${v.camelCaseName}: ${v.pascalCaseName}Queries!\n}\n\n\\`\n`.raw;\n","import { ts } from \"@tmpl/core\";\n\nexport const customSubgraphResolversTemplate = (v: {\n pascalCaseName: string;\n camelCaseName: string;\n}) =>\n ts`\nimport { type ISubgraph } from \"@powerhousedao/reactor-api\";\n\nexport const getResolvers = (subgraph: ISubgraph): Record<string, unknown> => {\n const reactor = subgraph.reactorClient;\n\n return ({\n Query: {\n ${v.camelCaseName}: () => 
({}), // namespace resolver for nested queries\n },\n ${v.pascalCaseName}Queries: {\n example: async (parent: unknown, args: { driveId: string }) => {\n return \"example\";\n },\n },\n });\n};\n`.raw;\n","import arg from \"arg\";\n\nexport const configSpec = {\n \"--document-models\": String,\n \"--editors\": String,\n \"--interactive\": Boolean,\n \"--skip-format\": Boolean,\n \"--watch\": Boolean,\n \"-i\": \"--interactive\",\n \"-sf\": \"--skip-format\",\n \"-w\": \"--watch\",\n} as const;\n\nexport function parseArgs<T extends arg.Spec>(argv: string[], spec: T) {\n const args = arg(spec, {\n permissive: true,\n argv,\n });\n\n return args;\n}\n\nexport function parseConfig(argv: string[]) {\n const config: Partial<{\n documentModelsDir?: string;\n editorsDir?: string;\n skipFormat?: boolean;\n interactive?: boolean;\n watch?: boolean;\n }> = {};\n const args = parseArgs(argv, configSpec);\n\n if (\"--document-models\" in args) {\n config.documentModelsDir = args[\"--document-models\"];\n }\n\n if (\"--editors\" in args) {\n config.editorsDir = args[\"--editors\"];\n }\n\n if (\"--skip-format\" in args) {\n config.skipFormat = true;\n }\n if (\"--interactive\" in args) {\n config.interactive = true;\n }\n if (\"--watch\" in args) {\n config.watch = true;\n }\n\n return config;\n}\n","/** Document model metadata for the `powerhouse/document-model` document type.\n *\n * Assumed to always be present during codegen.\n */\nexport const documentModelDocumentTypeMetadata = {\n documentModelId: \"powerhouse/document-model\",\n documentModelDocumentTypeName: \"DocumentModelDocument\",\n documentModelDirName: \"document-model\",\n documentModelImportPath: \"document-model\",\n} as const;\n","import type { DocumentModelDocumentTypeMetadata } from \"file-builders\";\nimport { readdirSync } from \"fs\";\nimport { getDocumentModelVariableNames } from \"name-builders\";\nimport { join } from \"path\";\nimport {\n filter,\n first,\n isDefined,\n isStrictEqual,\n isString,\n 
map,\n pipe,\n prop,\n when,\n} from \"remeda\";\nimport type { Project } from \"ts-morph\";\nimport { getOrCreateDirectory, loadDocumentModelInDir } from \"utils\";\n\ntype GetDocumentTypeMetadataArgs = {\n project: Project;\n documentModelId: string;\n};\n/** Gets the document model metadata for the --document-type argument\n * passed to the `generate --editor` and `generate --app` commands.\n */\nexport function getDocumentTypeMetadata({\n project,\n documentModelId,\n}: GetDocumentTypeMetadataArgs) {\n const { directory: documentModelsDir } = getOrCreateDirectory(\n project,\n \"document-models\",\n );\n const documentModelsDirPath = documentModelsDir.getPath();\n\n const documentModelVariableNames = pipe(\n readdirSync(documentModelsDirPath, { withFileTypes: true }),\n map(loadDocumentModelInDir),\n filter(isDefined),\n filter((state) => isStrictEqual(state.id, documentModelId)),\n first(),\n prop(\"name\"),\n when(isString, getDocumentModelVariableNames),\n );\n\n if (!documentModelVariableNames) {\n throw new Error(\n `Failed to get document type metadata for document type: ${documentModelId}.`,\n );\n }\n\n const { kebabCaseDocumentType, phDocumentTypeName } =\n documentModelVariableNames;\n\n const documentTypeMetadata: DocumentModelDocumentTypeMetadata = {\n documentModelId,\n documentModelDocumentTypeName: phDocumentTypeName,\n documentModelDirName: kebabCaseDocumentType,\n documentModelImportPath: join(\"document-models\", kebabCaseDocumentType),\n };\n\n return documentTypeMetadata;\n}\n","import { spawnAsync } from \"@powerhousedao/shared/clis\";\nimport { format, type BuiltInParserName, type LiteralUnion } from \"prettier\";\nimport type { SourceFile } from \"ts-morph\";\n\n/** Formats the text of a ts-morph source file with prettier before writing the text to memory */\nexport async function formatSourceFileWithPrettier(sourceFile: SourceFile) {\n sourceFile.organizeImports();\n const sourceText = sourceFile.getFullText();\n const formattedText = 
await formatSafe(sourceText);\n sourceFile.replaceWithText(formattedText);\n}\n\nexport async function formatSafe(\n sourceText: string,\n parser: LiteralUnion<BuiltInParserName, string> = \"typescript\",\n) {\n try {\n const formattedText = await format(sourceText, {\n parser,\n });\n return formattedText;\n } catch (error) {\n console.error(error);\n return sourceText;\n }\n}\n\nexport async function runPrettier() {\n await spawnAsync(\"npx\", [\"prettier\", \"--write\", \".\"]);\n}\n","import { join } from \"path\";\nimport { isIncludedIn, merge, pipe } from \"remeda\";\nimport { type Project } from \"ts-morph\";\nimport {\n getBooleanPropertyValue,\n getOrCreateDirectory,\n getStringArrayPropertyElements,\n getStringPropertyValue,\n} from \"utils\";\nimport { z } from \"zod\";\n\nconst EditorMetadataSchema = z.object({\n name: z.string(),\n id: z.string(),\n dirName: z.string(),\n documentTypes: z.array(z.string()),\n});\n\nexport function getEditorMetadata(project: Project, dirName: string) {\n const { directory: editorDir } = getOrCreateDirectory(\n project,\n join(\"editors\", dirName),\n );\n const parsedMetadata = pipe(\n editorDir,\n (dir) => dir.getSourceFile(\"module.ts\"),\n (sourceFile) => ({\n dirName: sourceFile?.getDirectory().getBaseName(),\n id: getStringPropertyValue(sourceFile, \"id\"),\n name: getStringPropertyValue(sourceFile, \"name\"),\n documentTypes: getStringArrayPropertyElements(\n sourceFile,\n \"documentTypes\",\n ),\n }),\n (data) => EditorMetadataSchema.safeParse(data),\n );\n\n if (parsedMetadata.success) return parsedMetadata.data;\n return undefined;\n}\n\nexport function getAppMetadata(project: Project, dirName: string) {\n const editorMetadata = getEditorMetadata(project, dirName);\n if (\n !editorMetadata ||\n isIncludedIn(\"powerhouse/document-drive\", editorMetadata.documentTypes)\n )\n return undefined;\n\n const appMetadata = pipe(\n project.getSourceFile(join(\"editors\", dirName, \"config.ts\")),\n (sourceFile) => ({\n 
isDragAndDropEnabled: getBooleanPropertyValue(\n sourceFile,\n \"isDragAndDropEnabled\",\n true,\n ),\n allowedDocumentTypes: getStringArrayPropertyElements(\n sourceFile,\n \"allowedDocumentTypes\",\n ),\n }),\n );\n return merge(editorMetadata, appMetadata);\n}\n","import { join } from \"path\";\nimport {\n conditional,\n constant,\n filter,\n isIncludedIn,\n isNot,\n isString,\n map,\n pipe,\n split,\n startsWith,\n} from \"remeda\";\nimport { type Project, type SourceFile } from \"ts-morph\";\nimport {\n getAllImportModuleSpecifiers,\n getAllImportNames,\n getOrCreateDirectory,\n getStringArrayPropertyElements,\n} from \"utils\";\n\nexport function getProcessorMetadata(project: Project, dirName: string) {\n const { directory: processorsDir } = getOrCreateDirectory(\n project,\n \"processors\",\n );\n const { directory: processorDir } = getOrCreateDirectory(\n project,\n join(\"processors\", dirName),\n );\n\n const connectProcessorNames = getProcessorNames(\n processorsDir.getSourceFile(\"connect.ts\"),\n );\n\n const switchboardProcessorNames = getProcessorNames(\n processorsDir.getSourceFile(\"switchboard.ts\"),\n );\n\n return pipe(dirName, (dirName) => ({\n processorName: dirName,\n /* We can try to determine which processors are for `connect` and for `switchboard`.\n * If we cannot, we fallback to including them in both. 
*/\n processorApps: conditional(\n dirName,\n [isNot(isIncludedIn(connectProcessorNames)), constant([\"switchboard\"])],\n [isNot(isIncludedIn(switchboardProcessorNames)), constant([\"connect\"])],\n constant([\"switchboard\", \"connect\"]),\n ),\n processorType: pipe(\n // handle the old `index.ts` file name if `processor.ts` has not been generated\n processorDir.getSourceFile(\"processor.ts\") ??\n processorDir.getSourceFile(\"index.ts\"),\n getAllImportNames,\n // we have to check what type is imported to determine whether the processor is `relationalDb` or `analytics`\n conditional(\n [\n (specifiers) => isIncludedIn(\"RelationalDbProcessor\", specifiers),\n constant(\"relationalDb\"),\n ],\n [\n (specifiers) => isIncludedIn(\"IAnalyticsStore\", specifiers),\n constant(\"analytics\"),\n ],\n constant(\"analytics\"),\n ),\n ),\n documentTypes: getStringArrayPropertyElements(\n processorDir.getSourceFile(\"factory.ts\"),\n \"documentTypes\",\n ),\n }));\n}\n\nconst getProcessorNames = (sourceFile: SourceFile | undefined) =>\n pipe(\n sourceFile,\n getAllImportModuleSpecifiers,\n filter(startsWith(\"processors/\")),\n map(split(\"/\")),\n map((s) => s.at(1)),\n filter(isString),\n );\n","import { join } from \"path\";\nimport { find, pipe } from \"remeda\";\nimport { SyntaxKind, type Project } from \"ts-morph\";\nimport { getOrCreateDirectory } from \"utils\";\n\nexport function getSubgraphMetadata(project: Project, dirName: string) {\n const { directory: subgraphDir } = getOrCreateDirectory(\n project,\n join(\"subgraphs\", dirName),\n );\n\n return pipe(\n subgraphDir.getSourceFile(\"index.ts\")?.getClasses() ?? 
[],\n find(\n (classDeclaration) =>\n classDeclaration.getBaseClass()?.getText().includes(\"BaseSubgraph\") ??\n false,\n ),\n (classDeclaration) =>\n classDeclaration\n ?.getInstanceProperty(\"name\")\n ?.asKind(SyntaxKind.PropertyDeclaration)\n ?.getInitializerIfKind(SyntaxKind.StringLiteral)\n ?.getLiteralValue(),\n (subgraphName) => ({ subgraphName }),\n );\n}\n","import path from \"path\";\nimport type { Project } from \"ts-morph\";\n\n/** Gets a SourceFile by name in a ts-morph Project, or creates a new one\n * if none with that path exists.\n */\nexport function getOrCreateSourceFile(project: Project, filePath: string) {\n const dirName = path.dirname(filePath);\n if (!project.getDirectory(dirName)) {\n project.createDirectory(dirName);\n }\n const sourceFile = project.getSourceFile(filePath);\n if (!sourceFile) {\n const newSourceFile = project.createSourceFile(filePath, \"\", {\n overwrite: true,\n });\n return {\n alreadyExists: false,\n sourceFile: newSourceFile,\n };\n }\n return {\n alreadyExists: true,\n sourceFile,\n };\n}\n\n/** Gets a Directory by name in a ts-morph Project, or creates a new one\n * if none with that path exists.\n */\nexport function getOrCreateDirectory(project: Project, dirPath: string) {\n const directory = project.getDirectory(dirPath);\n if (!directory) {\n const newDirectory = project.createDirectory(dirPath);\n return {\n alreadyExists: false,\n directory: newDirectory,\n };\n }\n return {\n alreadyExists: true,\n directory,\n };\n}\n\n/** Ensures that the directories at the given paths exist within the\n * ts-morph Project\n */\nexport async function ensureDirectoriesExist(\n project: Project,\n ...pathsToEnsure: string[]\n) {\n for (const dirPath of pathsToEnsure) {\n const dir = project.getDirectory(dirPath);\n if (!dir) {\n const newDir = project.createDirectory(dirPath);\n await newDir.save();\n }\n }\n}\n\nexport function getPreviousVersionSourceFile(args: {\n project: Project;\n version: number;\n filePath: 
string;\n}) {\n const { project, version, filePath } = args;\n const previousVersion = version - 1;\n if (previousVersion < 1) return;\n const previousVersionFilePath = filePath.replace(\n `/v${version}/`,\n `/v${previousVersion}/`,\n );\n\n const previousVersionFile = project.getSourceFile(previousVersionFilePath);\n\n return previousVersionFile;\n}\n","import type { SourceFile } from \"ts-morph\";\nimport { ts } from \"ts-morph\";\n\n/** Builds a ts-morph ObjectLiteralExpression from a ts/js object\n * Useful for substituting the value of a runtime object in templates\n */\nexport function buildObjectLiteral(\n inputObject: object,\n sourceFile: SourceFile,\n) {\n const propertyAssignments: ts.PropertyAssignment[] = [];\n for (const [key, value] of Object.entries(inputObject)) {\n const propertyAssignment = buildPropertyAssignment(key, value);\n propertyAssignments.push(propertyAssignment);\n }\n const objectLiteral = ts.factory.createObjectLiteralExpression(\n propertyAssignments,\n true,\n );\n\n const printNode = buildNodePrinter(sourceFile);\n return printNode(objectLiteral);\n}\n\nfunction buildFalse() {\n return ts.factory.createFalse();\n}\n\nfunction buildTrue() {\n return ts.factory.createTrue();\n}\n\nfunction buildBoolean(value: boolean) {\n return value ? 
buildTrue() : buildFalse();\n}\n\nfunction buildNull() {\n return ts.factory.createNull();\n}\n\nfunction buildUndefined() {\n return ts.factory.createIdentifier(\"undefined\");\n}\n\nfunction buildNumericLiteral(value: number) {\n return ts.factory.createNumericLiteral(value);\n}\n\nexport function buildStringLiteral(value: string) {\n return ts.factory.createStringLiteral(value);\n}\n\nfunction buildArrayLiteral(elements: ts.Expression[]) {\n return ts.factory.createArrayLiteralExpression(elements, true);\n}\n\nfunction valueToExpression(value: unknown): ts.Expression {\n if (value === null) return buildNull();\n if (value === undefined) return buildUndefined();\n if (typeof value === \"boolean\") return buildBoolean(value);\n if (typeof value === \"string\") return buildStringLiteral(value);\n if (typeof value === \"number\") return buildNumericLiteral(value);\n\n if (Array.isArray(value)) {\n const elements = value.map((item) => valueToExpression(item));\n return buildArrayLiteral(elements);\n }\n\n if (typeof value === \"object\") {\n return ts.factory.createObjectLiteralExpression(\n Object.entries(value).map(([key, v]) => {\n const name = ts.factory.createIdentifier(key);\n return ts.factory.createPropertyAssignment(name, valueToExpression(v));\n }),\n true,\n );\n }\n\n throw new Error(\"Invalid value passed: \", value);\n}\n\nfunction buildPropertyAssignment(name: string, value: unknown) {\n const nameIdentifier = ts.factory.createIdentifier(name);\n const valueExpression = valueToExpression(value);\n\n const propertyAssignment = ts.factory.createPropertyAssignment(\n nameIdentifier,\n valueExpression,\n );\n\n return propertyAssignment;\n}\n\nfunction buildNodePrinter(sourceFile: SourceFile) {\n const printer = ts.createPrinter({ newLine: ts.NewLineKind.LineFeed });\n return (node: ts.Node) =>\n printer.printNode(ts.EmitHint.Unspecified, node, sourceFile.compilerNode);\n}\n","import { fileExistsSync, isDirectory } from 
\"@powerhousedao/shared/clis\";\nimport {\n DocumentModelGlobalStateSchema,\n type DocumentModelGlobalState,\n} from \"@powerhousedao/shared/document-model\";\nimport { type Dirent } from \"fs\";\nimport { loadJsonFileSync } from \"load-json-file\";\nimport path from \"path\";\nimport {\n conditional,\n constant,\n filter,\n find,\n flatMap,\n isDefined,\n isIncludedIn,\n isStrictEqual,\n isString,\n map,\n pipe,\n when,\n} from \"remeda\";\nimport type {\n ObjectLiteralExpression,\n SourceFile,\n VariableStatement,\n} from \"ts-morph\";\nimport { SyntaxKind } from \"ts-morph\";\n\n/** Returns a ts-morph ObjectLiteralExpression from a variable statement\n * if the type matches\n */\nexport function getObjectLiteral(statement: VariableStatement | undefined) {\n return statement\n ?.getDeclarations()\n .at(0)\n ?.getInitializerIfKind(SyntaxKind.ObjectLiteralExpression);\n}\n\n/** Returns the value of a property in a ts-morph ObjectLiteralExpression of type T if it exists */\nexport function getObjectProperty<T extends SyntaxKind>(\n object: ObjectLiteralExpression | undefined,\n propertyName: string,\n propertyType: T,\n) {\n return object\n ?.getProperty(propertyName)\n ?.asKind(SyntaxKind.PropertyAssignment)\n ?.getChildren()\n .find((child) => child.getKind() === propertyType)\n ?.asKindOrThrow(propertyType);\n}\n\nexport function getVariableDeclarationByTypeName(\n sourceFile: SourceFile,\n typeName: string,\n) {\n const declaration = sourceFile.getVariableDeclaration((declaration) => {\n // First try matching the type annotation text (more reliable when types\n // can't be fully resolved, e.g. in external projects with linked deps)\n const typeAnnotation = declaration.getTypeNode()?.getText() ?? 
\"\";\n if (typeAnnotation.includes(typeName)) return true;\n // Fall back to resolved type text\n return declaration.getType().getText().includes(typeName);\n });\n return declaration;\n}\n\nexport function getProperyAssignmentByName(\n sourceFile: SourceFile | undefined,\n propertyName: string,\n) {\n if (!isDefined(sourceFile)) return undefined;\n\n return find(\n sourceFile.getDescendantsOfKind(SyntaxKind.PropertyAssignment),\n (assignment) => isStrictEqual(assignment.getName(), propertyName),\n );\n}\n\nexport function getStringPropertyValue(\n sourceFile: SourceFile | undefined,\n propertyName: string,\n) {\n return getProperyAssignmentByName(sourceFile, propertyName)\n ?.getFirstDescendantByKind(SyntaxKind.StringLiteral)\n ?.getLiteralValue();\n}\n\nexport function getStringArrayPropertyElements(\n sourceFile: SourceFile | undefined,\n propertyName: string,\n) {\n return pipe(\n getProperyAssignmentByName(sourceFile, propertyName)\n ?.getFirstDescendantByKind(SyntaxKind.ArrayLiteralExpression)\n ?.getElements() ?? 
[],\n map((element) =>\n element.asKind(SyntaxKind.StringLiteral)?.getLiteralValue(),\n ),\n filter(isString),\n );\n}\n\nexport function getBooleanPropertyValue(\n sourceFile: SourceFile | undefined,\n propertyName: string,\n fallback?: boolean,\n) {\n return pipe(\n getProperyAssignmentByName(sourceFile, propertyName)?.getDescendants() ??\n [],\n map((descendant) => descendant.getKind()),\n conditional(\n [(kinds) => isIncludedIn(SyntaxKind.TrueKeyword, kinds), constant(true)],\n [\n (kinds) => isIncludedIn(SyntaxKind.FalseKeyword, kinds),\n constant(false),\n ],\n constant(fallback),\n ),\n );\n}\n\nexport function loadDocumentModelInDir(\n dirent: Dirent | undefined,\n): DocumentModelGlobalState | undefined {\n if (!isDirectory(dirent)) return undefined;\n\n const parseResult = pipe(\n dirent,\n (dir) => path.join(dir.parentPath, `${dir.name}/${dir.name}.json`),\n when(fileExistsSync, loadJsonFileSync),\n (stateFile) => DocumentModelGlobalStateSchema().safeParse(stateFile),\n );\n\n if (!parseResult.success) {\n console.error(parseResult.error);\n return undefined;\n }\n\n return parseResult.data;\n}\n\nexport function getAllImportNames(sourceFile: SourceFile | undefined) {\n return pipe(\n sourceFile?.getImportDeclarations() ?? [],\n flatMap((importDeclaration) => importDeclaration.getNamedImports()),\n map((importSpecifier) => importSpecifier.getText()),\n );\n}\n\nexport function getAllImportModuleSpecifiers(\n sourceFile: SourceFile | undefined,\n) {\n return pipe(\n sourceFile?.getImportDeclarations() ?? 
[],\n flatMap((importDeclaration) =>\n importDeclaration.getModuleSpecifier().getLiteralValue(),\n ),\n );\n}\n","import path from \"path\";\nimport { IndentationText, Project } from \"ts-morph\";\n\nexport const DEFAULT_PROJECT_OPTIONS = {\n // don't add files from the tsconfig.json file, only use the ones we need\n skipAddingFilesFromTsConfig: true,\n // don't load library files, we only need the files we're adding\n skipLoadingLibFiles: true,\n // use formatting rules which match prettier\n manipulationSettings: {\n useTrailingCommas: true,\n indentationText: IndentationText.TwoSpaces,\n indentMultiLineObjectLiteralBeginningOnBlankLine: true,\n },\n} as const;\n\n/** Returns the minimal typescript config for use in ts-morph file generation */\nexport function getDefaultProjectOptions(tsConfigFilePath: string) {\n return {\n ...DEFAULT_PROJECT_OPTIONS,\n tsConfigFilePath,\n };\n}\n\n/** Instantiates a ts-morph Project using the default typescript config and nearest tsconfig.json file */\nexport function buildTsMorphProject(projectDir: string) {\n /* In general ts-morph struggles when its instance is running in a different directory to the tsconfig.json file it's using */\n process.chdir(projectDir);\n const tsConfigFilePath = path.join(projectDir, \"tsconfig.json\");\n const project = new Project({\n tsConfigFilePath,\n /* This avoids adding many files that are referenced by a given tsconfig in a monorepo\n * Probably only relevant for internal testing in this monorepo */\n skipFileDependencyResolution: true,\n });\n return project;\n}\n","/* eslint-disable @typescript-eslint/no-unsafe-assignment */\n/* eslint-disable @typescript-eslint/no-unsafe-member-access */\n/* eslint-disable @typescript-eslint/no-unsafe-argument */\nexport function getInitialStates(scopeState: { global: any; local: any }) {\n const { global, local } = scopeState;\n const scopes = { global, local };\n\n Object.entries(scopes).forEach(([scope, state]) => {\n if 
(!isEmptyStateSchema(state.schema) && state.initialValue === \"\") {\n throw new Error(\n `${\n scope.charAt(0).toLocaleUpperCase() + scope.slice(1)\n } scope has a defined schema but is missing an initial value.`,\n );\n }\n });\n\n return {\n initialGlobalState: handleEmptyState(global.initialValue),\n initialLocalState: handleEmptyState(local.initialValue),\n };\n}\n\nfunction isEmptyStateSchema(schema: string | string[]) {\n return schema === \"\" || !schema.includes(\"{\");\n}\n\nfunction handleEmptyState(state: string) {\n return state === \"\" ? \"{}\" : state;\n}\n","import type { DocumentModelGlobalState } from \"@powerhousedao/shared/document-model\";\n\n/**\n * Validation result for DocumentModelGlobalState code generation requirements\n */\nexport interface DocumentModelStateValidationResult {\n /** Whether the DocumentModelGlobalState is valid for code generation */\n isValid: boolean;\n /** Array of validation error messages if validation fails */\n errors: string[];\n}\n\n/**\n * Validates that a DocumentModelGlobalState has all required properties for successful code generation.\n *\n * @param documentModelState - The DocumentModelGlobalState to validate\n * @returns Validation result with isValid flag and error messages\n */\nexport function validateDocumentModelState(\n documentModelState: DocumentModelGlobalState,\n): DocumentModelStateValidationResult {\n const errors: string[] = [];\n\n // Validate top-level required properties\n if (\n !documentModelState.id ||\n typeof documentModelState.id !== \"string\" ||\n documentModelState.id.trim() === \"\"\n ) {\n errors.push('Property \"id\" is required and must be a non-empty string');\n }\n\n if (\n !documentModelState.name ||\n typeof documentModelState.name !== \"string\" ||\n documentModelState.name.trim() === \"\"\n ) {\n errors.push('Property \"name\" is required and must be a non-empty string');\n }\n\n // Extension field is optional (can be empty string)\n if (typeof 
documentModelState.extension !== \"string\") {\n errors.push('Property \"extension\" must be a string');\n }\n\n // Validate specifications array\n if (\n !Array.isArray(documentModelState.specifications) ||\n documentModelState.specifications.length === 0\n ) {\n errors.push(\n 'Property \"specifications\" is required and must be a non-empty array',\n );\n return { isValid: false, errors };\n }\n\n // Get the latest specification (used by code generation)\n const latestSpec =\n documentModelState.specifications[\n documentModelState.specifications.length - 1\n ];\n\n if (!latestSpec) {\n errors.push(\"Latest specification is missing or invalid\");\n return { isValid: false, errors };\n }\n\n // Validate state structure\n if (!latestSpec.state) {\n errors.push('Latest specification must have a \"state\" property');\n return { isValid: false, errors };\n }\n\n // Validate global state (required)\n if (!latestSpec.state.global) {\n errors.push('Latest specification state must have a \"global\" property');\n } else {\n const globalState = latestSpec.state.global;\n\n if (typeof globalState.schema !== \"string\") {\n errors.push('Global state \"schema\" must be a string');\n }\n\n if (typeof globalState.initialValue !== \"string\") {\n errors.push('Global state \"initialValue\" must be a string');\n }\n\n // Check if schema is non-empty but initialValue is missing\n const hasNonEmptySchema =\n globalState.schema &&\n globalState.schema.trim() !== \"\" &&\n globalState.schema.includes(\"{\");\n if (\n hasNonEmptySchema &&\n (!globalState.initialValue || globalState.initialValue.trim() === \"\")\n ) {\n errors.push(\n \"Global state has a defined schema but is missing an initial value\",\n );\n }\n }\n\n // Validate local state (required - templates directly access it)\n if (!latestSpec.state.local) {\n errors.push('Latest specification state must have a \"local\" property');\n } else {\n const localState = latestSpec.state.local;\n\n if (typeof localState.schema !== 
\"string\") {\n errors.push('Local state \"schema\" must be a string');\n }\n\n if (typeof localState.initialValue !== \"string\") {\n errors.push('Local state \"initialValue\" must be a string');\n }\n\n // Check if schema is non-empty but initialValue is missing\n const hasNonEmptySchema =\n localState.schema &&\n localState.schema.trim() !== \"\" &&\n localState.schema.includes(\"{\");\n if (\n hasNonEmptySchema &&\n (!localState.initialValue || localState.initialValue.trim() === \"\")\n ) {\n errors.push(\n \"Local state has a defined schema but is missing an initial value\",\n );\n }\n }\n\n // Validate modules array (required but can be empty)\n if (!Array.isArray(latestSpec.modules)) {\n errors.push('Latest specification must have a \"modules\" array');\n } else {\n // Validate that there's at least one module\n if (latestSpec.modules.length === 0) {\n errors.push(\"Latest specification must have at least one module defined\");\n }\n\n latestSpec.modules.forEach((module, moduleIndex) => {\n if (\n !module.name ||\n typeof module.name !== \"string\" ||\n module.name.trim() === \"\"\n ) {\n errors.push(\n `Module at index ${moduleIndex} must have a non-empty \"name\" property`,\n );\n }\n\n if (!Array.isArray(module.operations)) {\n errors.push(\n `Module \"${module.name || `at index ${moduleIndex}`}\" must have an \"operations\" array`,\n );\n } else {\n // Validate that each module has at least one operation\n if (module.operations.length === 0) {\n errors.push(\n `Module \"${module.name || `at index ${moduleIndex}`}\" must have at least one operation defined`,\n );\n }\n\n module.operations.forEach((operation, operationIndex) => {\n const operationId = operation.name || `at index ${operationIndex}`;\n const moduleId = module.name || `at index ${moduleIndex}`;\n\n // operation.name is required for code generation\n if (\n !operation.name ||\n typeof operation.name !== \"string\" ||\n operation.name.trim() === \"\"\n ) {\n errors.push(\n `Operation 
${operationId} in module \"${moduleId}\" must have a non-empty \"name\" property`,\n );\n }\n\n // operation.schema can be null or string (required property)\n if (\n operation.schema !== null &&\n typeof operation.schema !== \"string\"\n ) {\n errors.push(\n `Operation \"${operationId}\" in module \"${moduleId}\" must have a \"schema\" that is either null or a string`,\n );\n }\n\n // operation.scope is optional - template uses `a.scope || \"global\"` fallback\n if (\n operation.scope !== undefined &&\n typeof operation.scope !== \"string\"\n ) {\n errors.push(\n `Operation \"${operationId}\" in module \"${moduleId}\" must have a \"scope\" that is a string if provided`,\n );\n }\n\n // operation.errors is required - templates directly access it\n if (!Array.isArray(operation.errors)) {\n errors.push(\n `Operation \"${operationId}\" in module \"${moduleId}\" must have an \"errors\" array`,\n );\n }\n });\n }\n });\n }\n\n return {\n isValid: errors.length === 0,\n errors,\n };\n}\n","import { pascalCase } from \"change-case\";\nimport path from \"path\";\nimport { filter, forEach, isTruthy, map, pipe, uniqueBy } from \"remeda\";\nimport { documentEditorModuleFileTemplate, editorsTemplate } from \"templates\";\nimport { SyntaxKind, type Project } from \"ts-morph\";\nimport {\n formatSourceFileWithPrettier,\n getOrCreateSourceFile,\n getVariableDeclarationByTypeName,\n} from \"utils\";\n\ntype MakeEditorModuleFileArgs = {\n project: Project;\n editorName: string;\n editorId: string;\n documentModelId?: string;\n editorDirPath: string;\n legacyMultipleDocumentTypes?: string[];\n};\n/** Generates the `module.ts` file for a document editor or app */\nexport function makeEditorModuleFile({\n project,\n editorDirPath,\n editorName,\n documentModelId,\n editorId,\n legacyMultipleDocumentTypes,\n}: MakeEditorModuleFileArgs) {\n if (documentModelId && !!legacyMultipleDocumentTypes) {\n throw new Error(\n \"Cannot specify both documentModelId and 
legacyMultipleDocumentTypes\",\n );\n }\n const filePath = path.join(editorDirPath, \"module.ts\");\n const { sourceFile } = getOrCreateSourceFile(project, filePath);\n\n sourceFile.replaceWithText(\"\");\n\n const pascalCaseEditorName = pascalCase(editorName);\n const documentTypes = documentModelId\n ? `[\"${documentModelId}\"]`\n : JSON.stringify(legacyMultipleDocumentTypes);\n\n const template = documentEditorModuleFileTemplate({\n editorName,\n editorId,\n pascalCaseEditorName,\n documentTypes,\n });\n sourceFile.replaceWithText(template);\n}\n\nexport async function makeEditorsFile(args: {\n project: Project;\n editorsDirPath: string;\n}) {\n const { project, editorsDirPath } = args;\n const sourceFile = project.createSourceFile(\n path.join(editorsDirPath, \"editors.ts\"),\n editorsTemplate,\n { overwrite: true },\n );\n\n const editorsArray = sourceFile\n .getVariableDeclarationOrThrow(\"editors\")\n .getFirstDescendantByKindOrThrow(SyntaxKind.ArrayLiteralExpression);\n\n pipe(\n project.getDirectoryOrThrow(editorsDirPath).getDescendantSourceFiles(),\n filter((sourceFile) => sourceFile.getBaseName() === \"module.ts\"),\n uniqueBy((sourceFile) => sourceFile.getFilePath()),\n map((sourceFile) =>\n getVariableDeclarationByTypeName(sourceFile, \"EditorModule\"),\n ),\n filter(isTruthy),\n map((variableDeclaration) => ({\n name: variableDeclaration.getName(),\n editorDir: variableDeclaration\n .getSourceFile()\n .getDirectory()\n .getBaseName(),\n })),\n map(({ name, editorDir }) => ({\n name,\n namedImports: [name],\n moduleSpecifier: `./${path.join(editorDir, \"module.js\")}`,\n })),\n forEach(({ name, namedImports, moduleSpecifier }) => {\n sourceFile.addImportDeclaration({\n namedImports,\n moduleSpecifier,\n });\n editorsArray.addElement(name);\n }),\n );\n\n await formatSourceFileWithPrettier(sourceFile);\n}\n\nexport async function makeEditorsIndexFile(args: {\n project: Project;\n editorsDirPath: string;\n}) {\n const { project, editorsDirPath } = 
args;\n const sourceFile = project.createSourceFile(\n path.join(editorsDirPath, \"index.ts\"),\n \"\",\n { overwrite: true },\n );\n\n pipe(\n project.getDirectoryOrThrow(editorsDirPath).getDescendantSourceFiles(),\n filter((sourceFile) => sourceFile.getBaseName() === \"module.ts\"),\n uniqueBy((sourceFile) => sourceFile.getFilePath()),\n map((sourceFile) =>\n getVariableDeclarationByTypeName(sourceFile, \"EditorModule\"),\n ),\n filter(isTruthy),\n map((variableDeclaration) => ({\n name: variableDeclaration.getName(),\n editorDir: variableDeclaration\n .getSourceFile()\n .getDirectory()\n .getBaseName(),\n })),\n map(({ name, editorDir }) => ({\n namedExports: [name],\n moduleSpecifier: `./${path.join(editorDir, \"module.js\")}`,\n })),\n forEach(({ namedExports, moduleSpecifier }) => {\n sourceFile.addExportDeclaration({\n namedExports,\n moduleSpecifier,\n });\n }),\n );\n\n await formatSourceFileWithPrettier(sourceFile);\n}\n","import type { CommonGenerateEditorArgs } from \"@powerhousedao/codegen\";\nimport { createOrUpdateManifest } from \"file-builders\";\nimport path from \"path\";\nimport {\n appConfigFileTemplate,\n appDriveContentsFileTemplate,\n appEditorFileTemplate,\n appFilesFileTemplate,\n appFoldersFileTemplate,\n createDocumentFileTemplate,\n driveExplorerFileTemplate,\n driveExplorerNavigationBreadcrumbsFileTemplate,\n emptyStateFileTemplate,\n folderTreeFileTemplate,\n} from \"templates\";\nimport { type Project } from \"ts-morph\";\nimport {\n ensureDirectoriesExist,\n formatSourceFileWithPrettier,\n getOrCreateDirectory,\n getOrCreateSourceFile,\n} from \"utils\";\nimport {\n makeEditorModuleFile,\n makeEditorsFile,\n makeEditorsIndexFile,\n} from \"./editor-common.js\";\n\ntype GenerateAppArgs = CommonGenerateEditorArgs & {\n allowedDocumentModelIds: string[];\n isDragAndDropEnabled: boolean;\n};\n/** Generates a app with the configs for `allowedDocumentModelIds` and `isDragAndDropEnabled` */\nexport async function tsMorphGenerateApp({\n 
project,\n editorDir,\n editorName,\n editorId,\n allowedDocumentModelIds,\n isDragAndDropEnabled,\n}: GenerateAppArgs) {\n const { directory: editorsDir } = getOrCreateDirectory(project, \"editors\");\n const editorsDirPath = editorsDir.getPath();\n const { directory: documentModelsDir } = getOrCreateDirectory(\n project,\n \"document-models\",\n );\n const documentModelsDirPath = documentModelsDir.getPath();\n const editorDirPath = path.join(editorsDirPath, editorDir);\n const projectDir = editorsDir.getParentOrThrow().getPath();\n const editorComponentsDirPath = path.join(editorDirPath, \"components\");\n\n await ensureDirectoriesExist(\n project,\n documentModelsDirPath,\n editorsDirPath,\n editorDirPath,\n editorComponentsDirPath,\n );\n\n await makeNavigationBreadcrumbsFile({\n project,\n editorComponentsDirPath,\n });\n\n await makeCreateDocumentFile({\n project,\n editorComponentsDirPath,\n });\n\n await makeEmptyStateFile({\n project,\n editorComponentsDirPath,\n });\n\n await makeFoldersFile({\n project,\n editorComponentsDirPath,\n });\n\n await makeFolderTreeFile({\n project,\n editorComponentsDirPath,\n });\n\n await makeFilesFile({\n project,\n editorComponentsDirPath,\n });\n\n await makeDriveExplorerFile({\n project,\n editorComponentsDirPath,\n });\n\n await makeDriveContentsFile({\n project,\n editorComponentsDirPath,\n });\n\n await makeAppComponent({\n project,\n editorDirPath,\n });\n\n await makeAppConfigFile({\n project,\n allowedDocumentModelIds,\n isDragAndDropEnabled,\n editorDirPath,\n });\n\n makeEditorModuleFile({\n project,\n editorName,\n editorId,\n editorDirPath,\n documentModelId: \"powerhouse/document-drive\",\n });\n\n await makeEditorsFile({ project, editorsDirPath });\n await makeEditorsIndexFile({ project, editorsDirPath });\n await createOrUpdateManifest(\n {\n apps: [\n {\n name: editorName,\n id: editorId,\n documentTypes: [\"powerhousedao/document-drive\"],\n },\n ],\n },\n projectDir,\n );\n}\n\ntype MakeAppComponentArgs 
= {\n project: Project;\n editorDirPath: string;\n};\nasync function makeAppComponent({\n project,\n editorDirPath,\n}: MakeAppComponentArgs) {\n const filePath = path.join(editorDirPath, \"editor.tsx\");\n const { alreadyExists, sourceFile } = getOrCreateSourceFile(\n project,\n filePath,\n );\n\n if (alreadyExists) {\n const editorFunction = sourceFile.getFunction(\"Editor\");\n if (editorFunction) {\n if (!editorFunction.isDefaultExport()) {\n editorFunction.setIsDefaultExport(true);\n }\n return;\n }\n }\n const template = appEditorFileTemplate();\n sourceFile.replaceWithText(template);\n await formatSourceFileWithPrettier(sourceFile);\n}\n\ntype MakeAppConfigFileArgs = {\n project: Project;\n editorDirPath: string;\n allowedDocumentModelIds: string[];\n isDragAndDropEnabled: boolean;\n};\nasync function makeAppConfigFile({\n project,\n editorDirPath,\n allowedDocumentModelIds,\n isDragAndDropEnabled,\n}: MakeAppConfigFileArgs) {\n const filePath = path.join(editorDirPath, \"config.ts\");\n const { sourceFile } = getOrCreateSourceFile(project, filePath);\n const allowedDocumentTypesString = JSON.stringify(allowedDocumentModelIds);\n const isDragAndDropEnabledString = isDragAndDropEnabled ? 
\"true\" : \"false\";\n\n const template = appConfigFileTemplate({\n isDragAndDropEnabledString,\n allowedDocumentTypesString,\n });\n sourceFile.replaceWithText(template);\n await formatSourceFileWithPrettier(sourceFile);\n}\n\ntype MakeDriveContentsFileArgs = {\n project: Project;\n editorComponentsDirPath: string;\n};\nasync function makeDriveContentsFile({\n project,\n editorComponentsDirPath,\n}: MakeDriveContentsFileArgs) {\n const filePath = path.join(editorComponentsDirPath, \"DriveContents.tsx\");\n const { alreadyExists, sourceFile } = getOrCreateSourceFile(\n project,\n filePath,\n );\n\n if (alreadyExists) return;\n\n const template = appDriveContentsFileTemplate();\n sourceFile.replaceWithText(template);\n await formatSourceFileWithPrettier(sourceFile);\n}\n\ntype MakeNavigationBreadcrumbsFileArgs = {\n project: Project;\n editorComponentsDirPath: string;\n};\n\nasync function makeNavigationBreadcrumbsFile({\n project,\n editorComponentsDirPath,\n}: MakeNavigationBreadcrumbsFileArgs) {\n const filePath = path.join(\n editorComponentsDirPath,\n \"NavigationBreadcrumbs.tsx\",\n );\n const { alreadyExists, sourceFile } = getOrCreateSourceFile(\n project,\n filePath,\n );\n\n if (alreadyExists) return;\n\n sourceFile.replaceWithText(driveExplorerNavigationBreadcrumbsFileTemplate());\n await formatSourceFileWithPrettier(sourceFile);\n}\n\ntype MakeFoldersFileArgs = {\n project: Project;\n editorComponentsDirPath: string;\n};\nasync function makeFoldersFile({\n project,\n editorComponentsDirPath,\n}: MakeFoldersFileArgs) {\n const foldersFilePath = path.join(editorComponentsDirPath, \"Folders.tsx\");\n const { alreadyExists, sourceFile } = getOrCreateSourceFile(\n project,\n foldersFilePath,\n );\n\n if (alreadyExists) return;\n\n const template = appFoldersFileTemplate();\n sourceFile.replaceWithText(template);\n await formatSourceFileWithPrettier(sourceFile);\n}\n\ntype MakeFilesFileArgs = {\n project: Project;\n editorComponentsDirPath: string;\n};\nasync 
function makeFilesFile({\n project,\n editorComponentsDirPath,\n}: MakeFilesFileArgs) {\n const filesFilePath = path.join(editorComponentsDirPath, \"Files.tsx\");\n const { alreadyExists, sourceFile } = getOrCreateSourceFile(\n project,\n filesFilePath,\n );\n\n if (alreadyExists) return;\n\n const template = appFilesFileTemplate();\n sourceFile.replaceWithText(template);\n await formatSourceFileWithPrettier(sourceFile);\n}\n\ntype MakeDriveExplorerFileArgs = {\n project: Project;\n editorComponentsDirPath: string;\n};\nasync function makeDriveExplorerFile({\n project,\n editorComponentsDirPath,\n}: MakeDriveExplorerFileArgs) {\n const filePath = path.join(editorComponentsDirPath, \"DriveExplorer.tsx\");\n const { alreadyExists, sourceFile } = getOrCreateSourceFile(\n project,\n filePath,\n );\n\n if (alreadyExists) return;\n\n sourceFile.replaceWithText(driveExplorerFileTemplate);\n await formatSourceFileWithPrettier(sourceFile);\n}\n\ntype MakeFolderTreeFileArgs = {\n project: Project;\n editorComponentsDirPath: string;\n};\nasync function makeFolderTreeFile({\n project,\n editorComponentsDirPath,\n}: MakeFolderTreeFileArgs) {\n const filePath = path.join(editorComponentsDirPath, \"FolderTree.tsx\");\n const { alreadyExists, sourceFile } = getOrCreateSourceFile(\n project,\n filePath,\n );\n\n if (alreadyExists) return;\n\n sourceFile.replaceWithText(folderTreeFileTemplate);\n await formatSourceFileWithPrettier(sourceFile);\n}\n\ntype MakeEmptyStateFileArgs = {\n project: Project;\n editorComponentsDirPath: string;\n};\nasync function makeEmptyStateFile({\n project,\n editorComponentsDirPath,\n}: MakeEmptyStateFileArgs) {\n const filePath = path.join(editorComponentsDirPath, \"EmptyState.tsx\");\n const { alreadyExists, sourceFile } = getOrCreateSourceFile(\n project,\n filePath,\n );\n\n if (alreadyExists) return;\n\n sourceFile.replaceWithText(emptyStateFileTemplate);\n await formatSourceFileWithPrettier(sourceFile);\n}\n\ntype MakeCreateDocumentFileArgs = {\n 
project: Project;\n editorComponentsDirPath: string;\n};\nasync function makeCreateDocumentFile({\n project,\n editorComponentsDirPath,\n}: MakeCreateDocumentFileArgs) {\n const filePath = path.join(editorComponentsDirPath, \"CreateDocument.tsx\");\n const { alreadyExists, sourceFile } = getOrCreateSourceFile(\n project,\n filePath,\n );\n\n if (alreadyExists) return;\n\n sourceFile.replaceWithText(createDocumentFileTemplate);\n await formatSourceFileWithPrettier(sourceFile);\n}\n","import { writeFileEnsuringDir } from \"@powerhousedao/shared/clis\";\nimport {\n buildBoilerplatePackageJson,\n createOrUpdateManifest,\n} from \"file-builders\";\nimport { join } from \"path\";\nimport {\n agentsTemplate,\n buildPowerhouseConfigTemplate,\n claudeSettingsLocalTemplate,\n claudeTemplate,\n connectEntrypointTemplate,\n cursorMcpTemplate,\n dockerfileTemplate,\n documentModelsIndexTemplate,\n documentModelsTemplate,\n editorsIndexTemplate,\n editorsTemplate,\n eslintConfigTemplate,\n factoryBuildersTemplate,\n geminiSettingsTemplate,\n indexHtmlTemplate,\n indexTsTemplate,\n licenseTemplate,\n mainTsxTemplate,\n mcpTemplate,\n nginxConfTemplate,\n npmrcTemplate,\n processorsFactoryTemplate,\n processorsIndexTemplate,\n readmeTemplate,\n styleTemplate,\n subgraphsIndexTemplate,\n switchboardEntrypointTemplate,\n syncAndPublishWorkflowTemplate,\n tsConfigTemplate,\n upgradeManifestsTemplate,\n vitestConfigTemplate,\n} from \"templates\";\nimport { formatSafe } from \"utils\";\n\nexport async function writeGeneratedProjectRootFiles(projectDir: string) {\n await writeFileEnsuringDir(\n join(projectDir, \"tsconfig.json\"),\n await formatSafe(tsConfigTemplate, \"json\"),\n );\n await writeFileEnsuringDir(\n join(projectDir, \"index.html\"),\n await formatSafe(indexHtmlTemplate, \"html\"),\n );\n await writeFileEnsuringDir(\n join(projectDir, \"main.tsx\"),\n await formatSafe(mainTsxTemplate),\n );\n await writeFileEnsuringDir(\n join(projectDir, \"eslint.config.js\"),\n await 
formatSafe(eslintConfigTemplate),\n );\n await writeFileEnsuringDir(\n join(projectDir, \"index.ts\"),\n await formatSafe(indexTsTemplate),\n );\n await writeFileEnsuringDir(\n join(projectDir, \"style.css\"),\n await formatSafe(styleTemplate, \"css\"),\n );\n await writeFileEnsuringDir(\n join(projectDir, \"vitest.config.ts\"),\n await formatSafe(vitestConfigTemplate),\n );\n}\n\nexport async function writeGeneratedDocumentModelsFiles(projectDir: string) {\n await writeFileEnsuringDir(\n join(projectDir, \"document-models/document-models.ts\"),\n await formatSafe(documentModelsTemplate),\n );\n await writeFileEnsuringDir(\n join(projectDir, \"document-models/index.ts\"),\n await formatSafe(documentModelsIndexTemplate),\n );\n await writeFileEnsuringDir(\n join(projectDir, \"document-models/upgrade-manifests.ts\"),\n await formatSafe(upgradeManifestsTemplate),\n );\n}\n\nexport async function writeGeneratedEditorsFiles(projectDir: string) {\n await writeFileEnsuringDir(\n join(projectDir, \"editors/editors.ts\"),\n await formatSafe(editorsTemplate),\n );\n await writeFileEnsuringDir(\n join(projectDir, \"editors/index.ts\"),\n await formatSafe(editorsIndexTemplate),\n );\n}\n\nexport async function writeGeneratedProcessorsFiles(projectDir: string) {\n await writeFileEnsuringDir(\n join(projectDir, \"processors/factory.ts\"),\n await formatSafe(processorsFactoryTemplate),\n );\n await writeFileEnsuringDir(\n join(projectDir, \"processors/index.ts\"),\n await formatSafe(processorsIndexTemplate),\n );\n await writeFileEnsuringDir(\n join(projectDir, \"processors/connect.ts\"),\n await formatSafe(factoryBuildersTemplate),\n );\n await writeFileEnsuringDir(\n join(projectDir, \"processors/switchboard.ts\"),\n await formatSafe(factoryBuildersTemplate),\n );\n await writeFileEnsuringDir(\n join(projectDir, \"processors/index.ts\"),\n await formatSafe(processorsIndexTemplate),\n );\n}\n\nexport async function writeGeneratedSubgraphsFiles(projectDir: string) {\n await 
writeFileEnsuringDir(\n join(projectDir, \"subgraphs/index.ts\"),\n await formatSafe(subgraphsIndexTemplate),\n );\n}\n\nexport async function writeModuleFiles(projectDir = process.cwd()) {\n await writeGeneratedDocumentModelsFiles(projectDir);\n await writeGeneratedEditorsFiles(projectDir);\n await writeGeneratedProcessorsFiles(projectDir);\n await writeGeneratedSubgraphsFiles(projectDir);\n}\n\nexport async function writeAiConfigFiles(projectDir = process.cwd()) {\n await writeFileEnsuringDir(join(projectDir, \"CLAUDE.md\"), claudeTemplate);\n await writeFileEnsuringDir(join(projectDir, \"AGENTS.md\"), agentsTemplate);\n await writeFileEnsuringDir(join(projectDir, \".mcp.json\"), mcpTemplate);\n await writeFileEnsuringDir(\n join(projectDir, \".gemini/settings.json\"),\n geminiSettingsTemplate,\n );\n await writeFileEnsuringDir(\n join(projectDir, \".cursor/mcp.json\"),\n cursorMcpTemplate,\n );\n await writeFileEnsuringDir(\n join(projectDir, \".claude/settings.local.json\"),\n claudeSettingsLocalTemplate,\n );\n}\n\nexport async function writeProjectRootFiles(\n args: {\n name: string;\n tag?: string;\n version?: string;\n remoteDrive?: string;\n },\n projectDir = process.cwd(),\n) {\n const { name, tag, version, remoteDrive } = args;\n await writeFileEnsuringDir(\"LICENSE\", licenseTemplate);\n await writeFileEnsuringDir(\"README.md\", readmeTemplate);\n await writeFileEnsuringDir(\".npmrc\", npmrcTemplate);\n const packageJson = await buildBoilerplatePackageJson({\n name,\n tag,\n version,\n });\n await createOrUpdateManifest({ name }, projectDir);\n const powerhouseConfig = await buildPowerhouseConfigTemplate({\n tag,\n version,\n remoteDrive,\n });\n await writeFileEnsuringDir(\"powerhouse.config.json\", powerhouseConfig);\n await writeFileEnsuringDir(\"package.json\", packageJson);\n}\n\nexport async function writeCIFiles(projectDir = process.cwd()) {\n await writeFileEnsuringDir(\n join(projectDir, \".github/workflows/sync-and-publish.yml\"),\n 
syncAndPublishWorkflowTemplate,\n );\n await writeFileEnsuringDir(\n join(projectDir, \"Dockerfile\"),\n dockerfileTemplate,\n );\n await writeFileEnsuringDir(\n join(projectDir, \"docker/nginx.conf\"),\n nginxConfTemplate,\n );\n await writeFileEnsuringDir(\n join(projectDir, \"docker/connect-entrypoint.sh\"),\n connectEntrypointTemplate,\n );\n await writeFileEnsuringDir(\n join(projectDir, \"docker/switchboard-entrypoint.sh\"),\n switchboardEntrypointTemplate,\n );\n}\n\nexport async function writeAllGeneratedProjectFiles(\n projectDir = process.cwd(),\n) {\n await writeGeneratedProjectRootFiles(projectDir);\n await writeModuleFiles(projectDir);\n await writeAiConfigFiles(projectDir);\n await writeCIFiles(projectDir);\n}\n","import {\n makeVersionedDependencies,\n VERSIONED_DEPENDENCIES,\n VERSIONED_DEV_DEPENDENCIES,\n} from \"@powerhousedao/shared/clis\";\nimport { packageJsonTemplate } from \"templates\";\n\nexport async function buildBoilerplatePackageJson(args: {\n name: string;\n tag?: string;\n version?: string;\n workspace?: boolean;\n}) {\n const { name, tag, version, workspace } = args;\n const versionedDependencies = await makeVersionedDependencies({\n names: VERSIONED_DEPENDENCIES,\n tag,\n version,\n });\n const versionedDevDependencies = await makeVersionedDependencies({\n names: VERSIONED_DEV_DEPENDENCIES,\n tag,\n version,\n });\n\n const template = packageJsonTemplate(\n name,\n versionedDependencies,\n versionedDevDependencies,\n );\n\n return template;\n}\n","import type { CommandEntry, CommandHelpInfo } from \"@powerhousedao/codegen\";\nimport { writeFile } from \"node:fs/promises\";\nimport { stripVTControlCharacters } from \"node:util\";\nimport { docsFromCliHelpTemplate } from \"templates\";\nexport function getCommandHelpInfo<TEntry extends CommandEntry>(\n entry: TEntry,\n): CommandHelpInfo {\n const name = entry.name;\n const description = entry.command.description ?? \"\";\n const helpTopics = entry.command.helpTopics?.() ?? 
[];\n return {\n name,\n description,\n helpTopics,\n };\n}\n\nexport function getCommandsHelpInfo<TEntry extends CommandEntry>(\n entries: TEntry[],\n) {\n return entries.map(getCommandHelpInfo);\n}\n\nexport function makeCliDocsFromHelp<TEntry extends CommandEntry>(args: {\n cliDescription: string;\n docsTitle: string;\n docsIntroduction: string;\n entries: TEntry[];\n}) {\n const { cliDescription, docsIntroduction, docsTitle, entries } = args;\n const commandsHelpInfo = getCommandsHelpInfo(entries);\n\n const template = docsFromCliHelpTemplate({\n cliDescription,\n docsIntroduction,\n docsTitle,\n commandsHelpInfo,\n });\n\n const templateWithAnsiEscapesRemoved = stripVTControlCharacters(template);\n\n return templateWithAnsiEscapesRemoved;\n}\n\nexport async function writeCliDocsMarkdownFile<\n TEntry extends CommandEntry,\n>(args: {\n filePath: string;\n cliDescription: string;\n docsTitle: string;\n docsIntroduction: string;\n entries: TEntry[];\n}) {\n const { filePath, ...restArgs } = args;\n const markdownFileContent = makeCliDocsFromHelp(restArgs);\n\n await writeFile(filePath, markdownFileContent, {\n encoding: \"utf-8\",\n });\n}\n","import type {\n CommonGenerateEditorArgs,\n EditorVariableNames,\n} from \"@powerhousedao/codegen\";\nimport { createOrUpdateManifest } from \"file-builders\";\nimport { getEditorVariableNames } from \"name-builders\";\nimport path from \"path\";\nimport { documentEditorEditorFileTemplate } from \"templates\";\nimport { type Project } from \"ts-morph\";\nimport {\n ensureDirectoriesExist,\n formatSourceFileWithPrettier,\n getDocumentTypeMetadata,\n getOrCreateDirectory,\n getOrCreateSourceFile,\n} from \"utils\";\nimport {\n makeEditorModuleFile,\n makeEditorsFile,\n makeEditorsIndexFile,\n} from \"./editor-common.js\";\n\ntype GenerateEditorArgs = CommonGenerateEditorArgs & {\n documentModelId: string;\n};\n/** Generates a document editor for the given `documentModelId` (also called `documentType`) */\nexport async 
function tsMorphGenerateDocumentEditor({\n project,\n editorDir,\n editorName,\n editorId,\n documentModelId,\n}: GenerateEditorArgs) {\n const { directory: documentModelsDir } = getOrCreateDirectory(\n project,\n \"document-models\",\n );\n const documentModelsDirPath = documentModelsDir.getPath();\n const { directory: editorsDir } = getOrCreateDirectory(project, \"editors\");\n const editorsDirPath = editorsDir.getPath();\n const projectDir = editorsDir.getParentOrThrow().getPath();\n const editorDirPath = path.join(editorsDirPath, editorDir);\n const componentsDirPath = path.join(editorDirPath, \"components\");\n\n await ensureDirectoriesExist(\n project,\n documentModelsDirPath,\n editorsDirPath,\n editorDirPath,\n componentsDirPath,\n );\n const documentTypeMetadata = getDocumentTypeMetadata({\n project,\n documentModelId,\n });\n\n const editorVariableNames = getEditorVariableNames(documentTypeMetadata);\n\n await makeEditorComponent({\n project,\n editorDirPath,\n ...documentTypeMetadata,\n ...editorVariableNames,\n });\n\n makeEditorModuleFile({\n project,\n editorName,\n editorId,\n documentModelId,\n editorDirPath,\n });\n\n await makeEditorsFile({ project, editorsDirPath });\n await makeEditorsIndexFile({ project, editorsDirPath });\n await createOrUpdateManifest(\n {\n editors: [\n {\n name: editorName,\n id: editorId,\n documentTypes: [documentTypeMetadata.documentModelId],\n },\n ],\n },\n projectDir,\n );\n}\n\ntype MakeEditorComponentArgs = EditorVariableNames & {\n project: Project;\n editorDirPath: string;\n documentModelDocumentTypeName: string;\n documentModelImportPath: string;\n};\nasync function makeEditorComponent(args: MakeEditorComponentArgs) {\n const { project, editorDirPath } = args;\n const filePath = path.join(editorDirPath, \"editor.tsx\");\n const { alreadyExists, sourceFile } = getOrCreateSourceFile(\n project,\n filePath,\n );\n\n if (alreadyExists) {\n const functionDeclaration = sourceFile.getFunction(\"Editor\");\n if 
(functionDeclaration) {\n if (!functionDeclaration.isDefaultExport()) {\n functionDeclaration.setIsDefaultExport(true);\n }\n return;\n }\n }\n\n const template = documentEditorEditorFileTemplate(args);\n sourceFile.replaceWithText(template);\n await formatSourceFileWithPrettier(sourceFile);\n}\n","import type { CodegenConfig } from \"@graphql-codegen/cli\";\nimport { generate } from \"@graphql-codegen/cli\";\nimport type { TypeScriptPluginConfig } from \"@graphql-codegen/typescript\";\nimport {\n generatorTypeDefs,\n validationSchema,\n} from \"@powerhousedao/document-engineering/graphql\";\nimport type {\n DocumentSpecification,\n ModuleSpecification,\n} from \"@powerhousedao/shared/document-model\";\nimport type { DocumentModelFileMakerArgs } from \"file-builders\";\nimport type { ValidationSchemaPluginConfig } from \"graphql-codegen-typescript-validation-schema\";\nimport fs from \"node:fs/promises\";\nimport path from \"node:path\";\nimport { format } from \"prettier\";\n\nexport const scalars = {\n Unknown: \"unknown\",\n DateTime: \"string\",\n Attachment: \"string\",\n Address: \"`${string}:0x${string}`\",\n ...(generatorTypeDefs as Record<string, string>),\n};\n\nexport const scalarsValidation = {\n Unknown: \"z.unknown()\",\n DateTime: \"z.string().datetime()\",\n Attachment: \"z.string()\",\n Address:\n \"z.custom<`${string}:0x${string}`>((val) => /^[a-zA-Z0-9]+:0x[a-fA-F0-9]{40}$/.test(val as string))\",\n ...(validationSchema as Record<string, string>),\n};\n\nconst avoidOptionals: TypeScriptPluginConfig[\"avoidOptionals\"] = {\n field: true,\n inputValue: false,\n};\nconst maybeValue = \"T | null | undefined\";\nconst typescriptConfig: TypeScriptPluginConfig = {\n avoidOptionals,\n scalars,\n strictScalars: true,\n enumsAsTypes: true,\n skipTypename: true,\n maybeValue,\n};\n\nconst validationSchemaConfig: ValidationSchemaPluginConfig = {\n avoidOptionals,\n scalars,\n strictScalars: true,\n enumsAsTypes: true,\n skipTypename: true,\n importFrom: 
`./types.js`,\n schema: \"zodv4\",\n useTypeImports: true,\n scalarSchemas: scalarsValidation,\n directives: {\n equals: {\n value: [\"regex\", \"/^$1$/\"],\n },\n },\n withObjectType: true,\n maybeValue,\n};\n\nfunction buildSchemasForModules(modules: ModuleSpecification[]) {\n const schemaStrings: string[] = [];\n for (const module of modules) {\n schemaStrings.push(`# ${module.name}`);\n const operationsSchemas = module.operations\n .map((operation) => operation.schema)\n .filter((schema) => schema !== null);\n schemaStrings.push(...operationsSchemas);\n }\n return schemaStrings;\n}\n\nfunction buildGraphqlDocumentStringForSpecification(\n specification: DocumentSpecification,\n) {\n const customScalarSchemas = Object.keys(scalars)\n .map((k) => `scalar ${k}`)\n .join(\"\\n\");\n const stateSchemas = Object.values(specification.state).map(\n (state) => state.schema,\n );\n const moduleSchemas = buildSchemasForModules(specification.modules);\n\n return [customScalarSchemas, ...stateSchemas, ...moduleSchemas];\n}\n\nasync function formatContentWithPrettier(path: string, content: string) {\n const formattedContent = await format(content, {\n parser: \"typescript\",\n });\n return formattedContent;\n}\n\ntype GenerateTypesAndZodSchemasFromGraphqlArgs = {\n schemaDirPath: string;\n schema: string;\n};\nexport async function generateTypesAndZodSchemasFromGraphql(\n args: GenerateTypesAndZodSchemasFromGraphqlArgs,\n) {\n const { schemaDirPath, schema } = args;\n\n const config: CodegenConfig = {\n overwrite: true,\n watch: false,\n hooks: {\n beforeOneFileWrite: formatContentWithPrettier,\n },\n generates: {\n [`${schemaDirPath}/types.ts`]: {\n schema,\n config: typescriptConfig,\n plugins: [\n {\n typescript: typescriptConfig,\n },\n ],\n },\n [`${schemaDirPath}/zod.ts`]: {\n schema,\n config: validationSchemaConfig,\n plugins: [\n {\n add: {\n content:\n \"/* eslint-disable @typescript-eslint/no-empty-object-type */\",\n },\n },\n {\n add: {\n content: \"/* 
eslint-disable @typescript-eslint/no-unused-vars */\",\n },\n },\n {\n \"graphql-codegen-typescript-validation-schema\":\n validationSchemaConfig,\n },\n ],\n },\n },\n };\n\n await generate(config, true);\n}\n\nexport async function generateDocumentModelZodSchemas(\n args: DocumentModelFileMakerArgs,\n) {\n const { specification, schemaDirPath, versionDirPath } = args;\n\n const schema = buildGraphqlDocumentStringForSpecification(specification)\n .filter(Boolean)\n .join(\"\\n\\n\");\n\n await generateTypesAndZodSchemasFromGraphql({\n schemaDirPath,\n schema,\n });\n\n await fs.writeFile(path.join(versionDirPath, \"schema.graphql\"), schema);\n}\n","import { kebabCase } from \"change-case\";\nimport type {\n DocumentModelFileMakerArgs,\n DocumentModelModuleFileMakerArgs,\n} from \"file-builders\";\nimport path from \"path\";\nimport {\n documentModelDocumentSchemaFileTemplate,\n documentModelDocumentTypeTemplate,\n documentModelGenActionsFileTemplate,\n documentModelGenControllerFileTemplate,\n documentModelGenCreatorsFileTemplate,\n documentModelGenIndexFileTemplate,\n documentModelGenReducerFileTemplate,\n documentModelGenTypesTemplate,\n documentModelGenUtilsTemplate,\n documentModelOperationModuleActionsFileTemplate,\n documentModelOperationsModuleCreatorsFileTemplate,\n documentModelOperationsModuleErrorFileTemplate,\n documentModelOperationsModuleOperationsFileTemplate,\n documentModelPhFactoriesFileTemplate,\n documentModelSchemaIndexTemplate,\n} from \"templates\";\nimport { VariableDeclarationKind } from \"ts-morph\";\nimport {\n buildObjectLiteral,\n formatSourceFileWithPrettier,\n getOrCreateSourceFile,\n} from \"utils\";\n\nexport async function makeDocumentModelGenDirOperationModulesFiles(\n fileMakerArgs: DocumentModelFileMakerArgs,\n) {\n for (const module of fileMakerArgs.specification.modules) {\n await makeGenDirOperationModuleFiles({\n ...fileMakerArgs,\n module,\n });\n }\n}\n\nexport async function makeGenDirOperationModuleFiles(\n 
fileMakerArgs: DocumentModelModuleFileMakerArgs,\n) {\n await makeOperationModuleGenActionsFile(fileMakerArgs);\n await makeOperationModuleGenCreatorsFile(fileMakerArgs);\n await makeOperationModuleGenOperationsFile(fileMakerArgs);\n await makeOperationModuleGenErrorFile(fileMakerArgs);\n}\n\nexport async function makeDocumentModelGenUtilsFile(\n args: DocumentModelFileMakerArgs,\n) {\n const template = documentModelGenUtilsTemplate(args);\n const { project, genDirPath } = args;\n const utilsFilePath = path.join(genDirPath, \"utils.ts\");\n const { sourceFile } = getOrCreateSourceFile(project, utilsFilePath);\n sourceFile.replaceWithText(template);\n await formatSourceFileWithPrettier(sourceFile);\n}\n\nexport async function makeDocumentModelDocumentTypeFile(\n args: DocumentModelFileMakerArgs,\n) {\n const template = documentModelDocumentTypeTemplate(args);\n const { project, genDirPath } = args;\n\n const filePath = path.join(genDirPath, \"document-type.ts\");\n\n const { sourceFile } = getOrCreateSourceFile(project, filePath);\n\n sourceFile.replaceWithText(template);\n await formatSourceFileWithPrettier(sourceFile);\n}\n\nexport async function makeDocumentModelSchemaIndexFile(\n args: DocumentModelFileMakerArgs,\n) {\n const template = documentModelSchemaIndexTemplate;\n const { project, schemaDirPath } = args;\n const filePath = path.join(schemaDirPath, \"index.ts\");\n const { sourceFile } = getOrCreateSourceFile(project, filePath);\n\n sourceFile.replaceWithText(template);\n await formatSourceFileWithPrettier(sourceFile);\n}\n\nexport async function makeDocumentModelGenTypesFile(\n args: DocumentModelFileMakerArgs,\n) {\n const template = documentModelGenTypesTemplate(args);\n const { project, genDirPath } = args;\n\n const filePath = path.join(genDirPath, \"types.ts\");\n\n const { sourceFile } = getOrCreateSourceFile(project, filePath);\n\n sourceFile.replaceWithText(template);\n await formatSourceFileWithPrettier(sourceFile);\n}\n\nexport async function 
makeDocumentModelGenDocumentModelFile(\n args: DocumentModelFileMakerArgs,\n) {\n const { project, genDirPath, documentModelState } = args;\n const filePath = path.join(genDirPath, \"document-model.ts\");\n\n const { sourceFile } = getOrCreateSourceFile(project, filePath);\n\n sourceFile.replaceWithText(\"\");\n\n sourceFile.addImportDeclaration({\n namedImports: [\"DocumentModelGlobalState\"],\n moduleSpecifier: \"document-model\",\n isTypeOnly: true,\n });\n\n const documentModelStateString = buildObjectLiteral(\n documentModelState,\n sourceFile,\n );\n\n sourceFile.addVariableStatement({\n declarationKind: VariableDeclarationKind.Const,\n isExported: true,\n declarations: [\n {\n name: \"documentModel\",\n type: \"DocumentModelGlobalState\",\n initializer: documentModelStateString,\n },\n ],\n });\n\n await formatSourceFileWithPrettier(sourceFile);\n}\n\nexport async function makeDocumentModelGenDocumentSchemaFile(\n args: DocumentModelFileMakerArgs,\n) {\n const template = documentModelDocumentSchemaFileTemplate(args);\n const { project, genDirPath } = args;\n\n const filePath = path.join(genDirPath, \"document-schema.ts\");\n\n const { sourceFile } = getOrCreateSourceFile(project, filePath);\n\n sourceFile.replaceWithText(template);\n await formatSourceFileWithPrettier(sourceFile);\n}\n\nexport async function makeDocumentModelGenCreatorsFile(\n args: DocumentModelFileMakerArgs,\n) {\n const template = documentModelGenCreatorsFileTemplate(args);\n const { project, genDirPath } = args;\n\n const filePath = path.join(genDirPath, \"creators.ts\");\n\n const { sourceFile } = getOrCreateSourceFile(project, filePath);\n\n sourceFile.replaceWithText(template);\n await formatSourceFileWithPrettier(sourceFile);\n}\n\nexport async function makeDocumentModelGenPhFactoriesFile(\n args: DocumentModelFileMakerArgs,\n) {\n const template = documentModelPhFactoriesFileTemplate(args);\n const { project, genDirPath } = args;\n\n const filePath = path.join(genDirPath, 
\"ph-factories.ts\");\n\n const { sourceFile } = getOrCreateSourceFile(project, filePath);\n\n sourceFile.replaceWithText(template);\n await formatSourceFileWithPrettier(sourceFile);\n}\n\nexport async function makeDocumentModelGenControllerFile(\n args: DocumentModelFileMakerArgs,\n) {\n const template = documentModelGenControllerFileTemplate(args);\n const { project, genDirPath } = args;\n\n const filePath = path.join(genDirPath, \"controller.ts\");\n\n const { sourceFile } = getOrCreateSourceFile(project, filePath);\n\n sourceFile.replaceWithText(template);\n await formatSourceFileWithPrettier(sourceFile);\n}\n\nexport async function makeDocumentModelGenIndexFile(\n args: DocumentModelFileMakerArgs,\n) {\n const template = documentModelGenIndexFileTemplate(args);\n const { project, genDirPath } = args;\n\n const filePath = path.join(genDirPath, \"index.ts\");\n\n const { sourceFile } = getOrCreateSourceFile(project, filePath);\n\n sourceFile.replaceWithText(template);\n await formatSourceFileWithPrettier(sourceFile);\n}\n\nexport async function makeDocumentModelGenActionsFile(\n args: DocumentModelFileMakerArgs,\n) {\n const template = documentModelGenActionsFileTemplate(args);\n const { project, genDirPath } = args;\n\n const filePath = path.join(genDirPath, \"actions.ts\");\n\n const { sourceFile } = getOrCreateSourceFile(project, filePath);\n\n sourceFile.replaceWithText(template);\n await formatSourceFileWithPrettier(sourceFile);\n}\n\nexport async function makeDocumentModelGenReducerFile(\n args: DocumentModelFileMakerArgs,\n) {\n const template = documentModelGenReducerFileTemplate(args);\n const { project, genDirPath } = args;\n\n const filePath = path.join(genDirPath, \"reducer.ts\");\n\n const { sourceFile } = getOrCreateSourceFile(project, filePath);\n\n sourceFile.replaceWithText(template);\n await formatSourceFileWithPrettier(sourceFile);\n}\n\nexport async function makeOperationModuleGenActionsFile(\n args: DocumentModelModuleFileMakerArgs,\n) {\n 
const { module } = args;\n const kebabCaseModuleName = kebabCase(module.name);\n const template = documentModelOperationModuleActionsFileTemplate(args);\n const { project, genDirPath } = args;\n\n const dirPath = path.join(genDirPath, kebabCaseModuleName);\n const filePath = path.join(dirPath, \"actions.ts\");\n\n const { sourceFile } = getOrCreateSourceFile(project, filePath);\n\n sourceFile.replaceWithText(template);\n await formatSourceFileWithPrettier(sourceFile);\n}\n\nexport async function makeOperationModuleGenCreatorsFile(\n args: DocumentModelModuleFileMakerArgs,\n) {\n const { module } = args;\n const kebabCaseModuleName = kebabCase(module.name);\n const template = documentModelOperationsModuleCreatorsFileTemplate(args);\n const { project, genDirPath } = args;\n\n const dirPath = path.join(genDirPath, kebabCaseModuleName);\n const filePath = path.join(dirPath, \"creators.ts\");\n\n const { sourceFile } = getOrCreateSourceFile(project, filePath);\n\n sourceFile.replaceWithText(template);\n await formatSourceFileWithPrettier(sourceFile);\n}\n\nexport async function makeOperationModuleGenOperationsFile(\n args: DocumentModelModuleFileMakerArgs,\n) {\n const { module } = args;\n const kebabCaseModuleName = kebabCase(module.name);\n const template = documentModelOperationsModuleOperationsFileTemplate(args);\n const { project, genDirPath } = args;\n\n const dirPath = path.join(genDirPath, kebabCaseModuleName);\n const filePath = path.join(dirPath, \"operations.ts\");\n\n const { sourceFile } = getOrCreateSourceFile(project, filePath);\n\n sourceFile.replaceWithText(template);\n await formatSourceFileWithPrettier(sourceFile);\n}\n\nexport async function makeOperationModuleGenErrorFile(\n args: DocumentModelModuleFileMakerArgs,\n) {\n const { module } = args;\n const kebabCaseModuleName = kebabCase(module.name);\n const template = documentModelOperationsModuleErrorFileTemplate(args);\n const { project, genDirPath } = args;\n\n const dirPath = 
path.join(genDirPath, kebabCaseModuleName);\n\n const filePath = path.join(dirPath, \"error.ts\");\n\n const { sourceFile } = getOrCreateSourceFile(project, filePath);\n\n sourceFile.replaceWithText(template);\n await formatSourceFileWithPrettier(sourceFile);\n}\n","import type { DocumentModelFileMakerArgs } from \"@powerhousedao/codegen\";\nimport path from \"path\";\nimport {\n documentModelHooksFileTemplate,\n documentModelIndexTemplate,\n documentModelModuleFileTemplate,\n documentModelRootActionsFileTemplate,\n documentModelUtilsTemplate,\n} from \"templates\";\nimport { formatSourceFileWithPrettier, getOrCreateSourceFile } from \"utils\";\n\nexport async function makeDocumentModelVersionIndexFile(\n args: DocumentModelFileMakerArgs,\n) {\n const template = documentModelIndexTemplate;\n const { project, versionDirPath } = args;\n\n const filePath = path.join(versionDirPath, \"index.ts\");\n\n const { sourceFile } = getOrCreateSourceFile(project, filePath);\n\n sourceFile.replaceWithText(template);\n await formatSourceFileWithPrettier(sourceFile);\n}\n\nexport async function makeDocumentModelUtilsFile(\n args: DocumentModelFileMakerArgs,\n) {\n const template = documentModelUtilsTemplate(args);\n const { project, versionDirPath } = args;\n\n const filePath = path.join(versionDirPath, \"utils.ts\");\n\n const { sourceFile } = getOrCreateSourceFile(project, filePath);\n sourceFile.replaceWithText(template);\n await formatSourceFileWithPrettier(sourceFile);\n}\n\nexport async function makeDocumentModelRootActionsFile(\n args: DocumentModelFileMakerArgs,\n) {\n const template = documentModelRootActionsFileTemplate(args);\n const { project, versionDirPath } = args;\n\n const filePath = path.join(versionDirPath, \"actions.ts\");\n\n const { sourceFile } = getOrCreateSourceFile(project, filePath);\n\n sourceFile.replaceWithText(template);\n await formatSourceFileWithPrettier(sourceFile);\n}\n\nexport async function makeDocumentModelHooksFile(\n args: 
DocumentModelFileMakerArgs,\n) {\n const template = documentModelHooksFileTemplate(args);\n const { project, versionDirPath } = args;\n\n const filePath = path.join(versionDirPath, \"hooks.ts\");\n\n const { sourceFile } = getOrCreateSourceFile(project, filePath);\n\n sourceFile.replaceWithText(template);\n await formatSourceFileWithPrettier(sourceFile);\n}\n\nexport async function makeDocumentModelModuleFile(\n args: DocumentModelFileMakerArgs,\n) {\n const { project, versionDirPath } = args;\n const template = documentModelModuleFileTemplate(args);\n\n const moduleFilePath = path.join(versionDirPath, \"module.ts\");\n\n const { sourceFile } = getOrCreateSourceFile(project, moduleFilePath);\n\n sourceFile.replaceWithText(template);\n\n await formatSourceFileWithPrettier(sourceFile);\n}\n","import type {\n DocumentModelFileMakerArgs,\n DocumentModelModuleFileMakerArgs,\n} from \"@powerhousedao/codegen\";\nimport type { ModuleSpecification } from \"@powerhousedao/shared/document-model\";\nimport { ts } from \"@tmpl/core\";\nimport { camelCase, kebabCase, pascalCase } from \"change-case\";\nimport path from \"path\";\nimport {\n documentModelSrcIndexFileTemplate,\n documentModelSrcUtilsTemplate,\n} from \"templates\";\nimport type { SourceFile } from \"ts-morph\";\nimport { VariableDeclarationKind } from \"ts-morph\";\nimport {\n formatSourceFileWithPrettier,\n getObjectLiteral,\n getOrCreateSourceFile,\n getPreviousVersionSourceFile,\n} from \"utils\";\n\nexport async function makeReducerOperationHandlersForModules(\n fileMakerArgs: DocumentModelFileMakerArgs,\n) {\n const { specification } = fileMakerArgs;\n for (const module of specification.modules) {\n await makeReducerOperationHandlerForModule({\n ...fileMakerArgs,\n module,\n });\n }\n}\n\nexport async function makeReducerOperationHandlerForModule({\n project,\n module,\n version,\n srcDirPath,\n versionImportPath,\n pascalCaseDocumentType,\n camelCaseDocumentType,\n}: DocumentModelModuleFileMakerArgs) {\n 
const kebabCaseModuleName = kebabCase(module.name);\n const pascalCaseModuleName = pascalCase(module.name);\n const filePath = path.join(\n srcDirPath,\n \"reducers\",\n `${kebabCaseModuleName}.ts`,\n );\n const { alreadyExists, sourceFile } = getOrCreateSourceFile(\n project,\n filePath,\n );\n if (!alreadyExists) {\n const previousVersionFile = getPreviousVersionSourceFile({\n project,\n version,\n filePath,\n });\n if (previousVersionFile) {\n sourceFile.replaceWithText(previousVersionFile.getText());\n }\n }\n const operationsInterfaceTypeName = `${pascalCaseDocumentType}${pascalCaseModuleName}Operations`;\n const operationsInterfaceVariableName = `${camelCaseDocumentType}${pascalCaseModuleName}Operations`;\n\n const existingOperationsInterfaceTypeImport = sourceFile.getImportDeclaration(\n (importDeclaration) =>\n !!importDeclaration\n .getNamedImports()\n .find(\n (importSpecifier) =>\n importSpecifier.getName() === operationsInterfaceTypeName,\n ),\n );\n if (existingOperationsInterfaceTypeImport) {\n existingOperationsInterfaceTypeImport.remove();\n }\n\n const operationsInterfaceTypeImport = sourceFile.addImportDeclaration({\n namedImports: [operationsInterfaceTypeName],\n moduleSpecifier: versionImportPath,\n isTypeOnly: true,\n });\n\n const operationsInterfaceTypeProperties = operationsInterfaceTypeImport\n .getNamedImports()\n .find((value) => value.getName() === operationsInterfaceTypeName)\n ?.getNameNode()\n .getType()\n .getProperties()\n .map((symbol) => symbol.getName());\n\n if (!operationsInterfaceTypeProperties) {\n throw new Error(\"Failed to create operation handler object\");\n }\n\n let operationsInterfaceVariableStatement = sourceFile.getVariableStatement(\n operationsInterfaceVariableName,\n );\n\n if (!operationsInterfaceVariableStatement) {\n operationsInterfaceVariableStatement = sourceFile.addVariableStatement({\n declarationKind: VariableDeclarationKind.Const,\n isExported: true,\n declarations: [\n {\n name: 
operationsInterfaceVariableName,\n type: operationsInterfaceTypeName,\n initializer: \"{}\",\n },\n ],\n });\n }\n\n const operationsInterfaceObject = getObjectLiteral(\n operationsInterfaceVariableStatement,\n );\n\n if (!operationsInterfaceObject) {\n throw new Error(\"Failed to build reducer object\");\n }\n\n // Build a lookup map from method name to operation spec to access reducer code\n const operationsByMethodName = new Map<\n string,\n (typeof module.operations)[number]\n >();\n for (const operation of module.operations) {\n if (operation.name) {\n const methodName = `${camelCase(operation.name)}Operation`;\n operationsByMethodName.set(methodName, operation);\n }\n }\n\n for (const name of operationsInterfaceTypeProperties) {\n if (operationsInterfaceObject.getProperty(name)) continue;\n\n const operationSpec = operationsByMethodName.get(name);\n const reducerCode = operationSpec?.reducer?.trim();\n\n operationsInterfaceObject.addMethod({\n name,\n parameters: [{ name: \"state\" }, { name: \"action\" }],\n statements: reducerCode\n ? 
[reducerCode]\n : [\n `// TODO: implement ${name} reducer`,\n ts`throw new Error(\"Reducer for '${name}' not implemented.\")`.raw,\n ],\n });\n }\n\n // Add error imports for error classes referenced in reducer code\n addErrorImportsForModule(sourceFile, module);\n\n await formatSourceFileWithPrettier(sourceFile);\n}\n\nexport async function makeDocumentModelSrcIndexFile({\n project,\n ...variableNames\n}: DocumentModelFileMakerArgs) {\n const template = documentModelSrcIndexFileTemplate;\n const { srcDirPath } = variableNames;\n\n const filePath = path.join(srcDirPath, \"index.ts\");\n\n const { sourceFile } = getOrCreateSourceFile(project, filePath);\n\n sourceFile.replaceWithText(template);\n await formatSourceFileWithPrettier(sourceFile);\n}\n\nexport async function makeDocumentModelSrcUtilsFile({\n project,\n srcDirPath,\n version,\n}: DocumentModelFileMakerArgs) {\n const template = documentModelSrcUtilsTemplate;\n\n const filePath = path.join(srcDirPath, \"utils.ts\");\n\n const { alreadyExists, sourceFile } = getOrCreateSourceFile(\n project,\n filePath,\n );\n\n if (!alreadyExists) {\n const previousVersionSourceFile = getPreviousVersionSourceFile({\n project,\n version,\n filePath,\n });\n\n if (previousVersionSourceFile) {\n sourceFile.replaceWithText(previousVersionSourceFile.getText());\n } else {\n sourceFile.replaceWithText(template);\n }\n }\n\n await formatSourceFileWithPrettier(sourceFile);\n}\n\nfunction addErrorImportsForModule(\n sourceFile: SourceFile,\n module: ModuleSpecification,\n): void {\n // Collect all unique errors from all operations in this module\n const allErrors: { name: string }[] = [];\n for (const operation of module.operations) {\n if (Array.isArray(operation.errors)) {\n for (const error of operation.errors) {\n if (error.name && !allErrors.find((e) => e.name === error.name)) {\n allErrors.push({ name: error.name });\n }\n }\n }\n }\n\n if (allErrors.length === 0) return;\n\n // Scan the source file content to find which 
error classes are actually referenced\n const sourceFileContent = sourceFile.getFullText();\n const usedErrors: string[] = [];\n\n for (const error of allErrors) {\n const errorPattern = new RegExp(`\\\\b${error.name}\\\\b`, \"g\");\n if (errorPattern.test(sourceFileContent)) {\n usedErrors.push(error.name);\n }\n }\n\n if (usedErrors.length === 0) return;\n\n const errorImportPath = `../../gen/${kebabCase(module.name)}/error.js`;\n\n const existingErrorImport = sourceFile\n .getImportDeclarations()\n .find(\n (importDecl) => importDecl.getModuleSpecifierValue() === errorImportPath,\n );\n\n if (existingErrorImport) {\n const existingNamedImports = existingErrorImport\n .getNamedImports()\n .map((namedImport) => namedImport.getName());\n\n const newErrorsToImport = usedErrors.filter(\n (errorName) => !existingNamedImports.includes(errorName),\n );\n\n if (newErrorsToImport.length > 0) {\n existingErrorImport.addNamedImports(newErrorsToImport);\n }\n } else {\n sourceFile.addImportDeclaration({\n namedImports: usedErrors,\n moduleSpecifier: errorImportPath,\n });\n }\n}\n","import type {\n DocumentModelFileMakerArgs,\n DocumentModelModuleFileMakerArgs,\n} from \"@powerhousedao/codegen\";\nimport { ts } from \"@tmpl/core\";\nimport { camelCase, kebabCase, pascalCase } from \"change-case\";\nimport path from \"path\";\nimport { filter, isIncludedIn, map, pipe } from \"remeda\";\nimport {\n documentModelTestFileTemplate,\n makeOperationImportNames,\n makeTestCaseForOperation,\n} from \"templates\";\nimport { SyntaxKind } from \"ts-morph\";\nimport {\n formatSourceFileWithPrettier,\n getOrCreateSourceFile,\n getPreviousVersionSourceFile,\n} from \"utils\";\n\nexport async function makeDocumentModelModulesOperationTestFiles(\n fileMakerArgs: DocumentModelFileMakerArgs,\n) {\n for (const module of fileMakerArgs.specification.modules) {\n await makeOperationModuleTestFile({ ...fileMakerArgs, module });\n }\n}\n\nexport async function makeOperationModuleTestFile(\n args: 
DocumentModelModuleFileMakerArgs,\n) {\n const {\n project,\n module,\n version,\n versionImportPath,\n testsDirPath,\n isPhDocumentOfTypeFunctionName,\n } = args;\n const kebabCaseModuleName = kebabCase(module.name);\n const pascalCaseModuleName = pascalCase(module.name);\n const moduleOperationsTypeName = `${pascalCaseModuleName}Operations`;\n const filePath = path.join(testsDirPath, `${kebabCaseModuleName}.test.ts`);\n\n const { alreadyExists, sourceFile } = getOrCreateSourceFile(\n project,\n filePath,\n );\n\n if (!alreadyExists) {\n const previousVersionSourceFile = getPreviousVersionSourceFile({\n project,\n version,\n filePath,\n });\n\n if (previousVersionSourceFile) {\n sourceFile.replaceWithText(previousVersionSourceFile.getText());\n } else {\n sourceFile.replaceWithText(\n ts`\n import { generateMock } from \"document-model\";\n import { describe, expect, it } from \"vitest\";\n\n describe(\"${moduleOperationsTypeName}\", () => {\n\n });\n `.raw,\n );\n }\n }\n\n const importNames = makeOperationImportNames(args);\n const namedImports = importNames.map((name) => ({ name }));\n\n let actionsImportDeclaration = sourceFile\n .getImportDeclarations()\n .filter((i) => !i.isTypeOnly())\n .find((importDeclaration) =>\n importDeclaration\n .getModuleSpecifier()\n .getText()\n .includes(versionImportPath),\n );\n\n if (!actionsImportDeclaration) {\n actionsImportDeclaration = sourceFile.addImportDeclaration({\n namedImports,\n moduleSpecifier: versionImportPath,\n });\n } else {\n actionsImportDeclaration.setModuleSpecifier(versionImportPath);\n const existingNamedImports = actionsImportDeclaration\n .getNamedImports()\n .map((value) => value.getName());\n\n for (const name of importNames) {\n if (!existingNamedImports.includes(name)) {\n actionsImportDeclaration.addNamedImport(name);\n }\n }\n }\n\n const describeCall = sourceFile\n .getDescendantsOfKind(SyntaxKind.CallExpression)\n .find((call) => {\n const expressionText = call.getExpression().getText();\n 
const args = call.getArguments();\n const firstArg = args[0];\n return (\n expressionText === \"describe\" &&\n pascalCase(firstArg.getText()).includes(moduleOperationsTypeName)\n );\n });\n\n if (!describeCall) {\n console.error(\n `Test file at path ${filePath} has no describe block for ${moduleOperationsTypeName}`,\n );\n return;\n }\n\n const describeCallBody = describeCall\n .getArguments()[1]\n .asKindOrThrow(SyntaxKind.ArrowFunction);\n\n const testCaseNames = describeCall\n .getDescendantsOfKind(SyntaxKind.CallExpression)\n .filter((call) => {\n const expressionText = call.getExpression().getText();\n return expressionText === \"it\" || expressionText === \"test\";\n })\n .map((c) => c.getArguments()[0].getText());\n\n const testCasesToAdd = pipe(\n module.operations,\n filter((o) => !isIncludedIn(camelCase(o.name ?? \"\"), testCaseNames)),\n map((o) => makeTestCaseForOperation(o, isPhDocumentOfTypeFunctionName)),\n );\n\n describeCallBody.addStatements(testCasesToAdd);\n\n const GENERATE_MOCK_NAME = \"generateMock\";\n const GENERATE_MOCK_MODULE_SPECIFIER = \"@powerhousedao/codegen\";\n\n const generateMockImport = sourceFile.getImportDeclaration((i) =>\n i.getNamedImports().some((v) => v.getText().includes(GENERATE_MOCK_NAME)),\n );\n\n const hasGenerateMockInSourceFile = sourceFile\n .getText()\n .includes(GENERATE_MOCK_NAME);\n\n if (hasGenerateMockInSourceFile && !generateMockImport) {\n sourceFile.addImportDeclaration({\n namedImports: [GENERATE_MOCK_NAME],\n moduleSpecifier: GENERATE_MOCK_MODULE_SPECIFIER,\n });\n }\n\n sourceFile.fixUnusedIdentifiers();\n await formatSourceFileWithPrettier(sourceFile);\n}\n\nexport async function makeDocumentModelTestFile(\n args: DocumentModelFileMakerArgs,\n) {\n const { project, testsDirPath } = args;\n const template = documentModelTestFileTemplate(args);\n\n const filePath = path.join(testsDirPath, \"document-model.test.ts\");\n\n const { alreadyExists, sourceFile } = getOrCreateSourceFile(\n project,\n 
filePath,\n );\n\n if (alreadyExists) return;\n\n sourceFile.replaceWithText(template);\n await formatSourceFileWithPrettier(sourceFile);\n}\n","import type { DocumentModelFileMakerArgs } from \"file-builders\";\nimport path from \"path\";\nimport { upgradeManifestTemplate, upgradeTransitionTemplate } from \"templates\";\nimport { VariableDeclarationKind, type Project } from \"ts-morph\";\nimport {\n formatSourceFileWithPrettier,\n getObjectLiteral,\n getOrCreateSourceFile,\n getVariableDeclarationByTypeName,\n} from \"utils\";\n\nexport async function makeUpgradeFile(args: DocumentModelFileMakerArgs) {\n const { project, version, upgradesDirPath } = args;\n if (version < 2) return;\n\n const filePath = path.join(upgradesDirPath, `v${version}.ts`);\n const { alreadyExists, sourceFile } = getOrCreateSourceFile(\n project,\n filePath,\n );\n\n if (alreadyExists) return;\n\n const template = upgradeTransitionTemplate(args);\n\n sourceFile.replaceWithText(template);\n await formatSourceFileWithPrettier(sourceFile);\n}\n\nexport async function createOrUpdateUpgradeManifestFile(\n args: DocumentModelFileMakerArgs,\n) {\n const { project, versions, upgradesDirPath } = args;\n const filePath = path.join(upgradesDirPath, \"upgrade-manifest.ts\");\n\n const { sourceFile } = getOrCreateSourceFile(project, filePath);\n\n const template = upgradeManifestTemplate(args);\n\n sourceFile.replaceWithText(template);\n\n const upgradeTransitionImports = buildUpgradeTransitionImports(versions);\n\n sourceFile.addImportDeclarations(upgradeTransitionImports);\n\n const upgradeManifestStatement = getVariableDeclarationByTypeName(\n sourceFile,\n \"UpgradeManifest\",\n )?.getVariableStatementOrThrow();\n const objectLiteral = getObjectLiteral(upgradeManifestStatement);\n const upgradesProperty = objectLiteral?.getProperty(\"upgrades\");\n const upgrades = buildUpgrades(versions);\n upgradesProperty?.replaceWithText(upgrades);\n await formatSourceFileWithPrettier(sourceFile);\n}\n\nfunction 
buildUpgrades(specVersions: number[]) {\n const upgradeStrings: string[] = [];\n\n for (const version of specVersions) {\n if (version < 2) continue;\n upgradeStrings.push(`v${version}`);\n }\n\n return `upgrades: { ${upgradeStrings.join(\",\\n\")} }`;\n}\n\nfunction buildUpgradeTransitionImports(specVersions: number[]) {\n const imports: {\n namedImports: string[];\n moduleSpecifier: string;\n }[] = [];\n\n for (const version of specVersions) {\n if (version < 2) continue;\n const namedImports = [`v${version}`];\n const moduleSpecifier = `./v${version}.js`;\n imports.push({\n namedImports,\n moduleSpecifier,\n });\n }\n\n return imports;\n}\n\ntype MakeVersionConstantsFileArgs = {\n project: Project;\n upgradesDirPath: string;\n versions: number[];\n latestVersion: number;\n};\nexport async function createOrUpdateVersionConstantsFile({\n versions,\n latestVersion,\n project,\n upgradesDirPath,\n}: MakeVersionConstantsFileArgs) {\n const SUPPORTED_VERSIONS = \"supportedVersions\";\n const LATEST_VERSION = \"latestVersion\";\n const filePath = path.join(upgradesDirPath, \"versions.ts\");\n const { sourceFile } = getOrCreateSourceFile(project, filePath);\n sourceFile.replaceWithText(\"\");\n\n const latestVersionIndex = versions.indexOf(latestVersion);\n const versionInitializer = `[${versions.join(\", \")}] as const;`;\n const latestInitializer = `${SUPPORTED_VERSIONS}[${latestVersionIndex}];`;\n\n sourceFile.addVariableStatement({\n declarationKind: VariableDeclarationKind.Const,\n isExported: true,\n declarations: [\n {\n name: SUPPORTED_VERSIONS,\n initializer: versionInitializer,\n },\n ],\n });\n\n sourceFile.addVariableStatement({\n declarationKind: VariableDeclarationKind.Const,\n isExported: true,\n declarations: [\n {\n name: LATEST_VERSION,\n initializer: latestInitializer,\n },\n ],\n });\n\n await formatSourceFileWithPrettier(sourceFile);\n}\n\ntype MakeUpgradesIndexFileArgs = {\n project: Project;\n upgradesDirPath: string;\n upgradeManifestName: 
string;\n versions: number[];\n};\nexport async function makeUpgradesIndexFile({\n project,\n upgradesDirPath,\n versions,\n upgradeManifestName,\n}: MakeUpgradesIndexFileArgs) {\n const filePath = path.join(upgradesDirPath, \"index.ts\");\n const { sourceFile } = getOrCreateSourceFile(project, filePath);\n sourceFile.replaceWithText(\"\");\n\n const upgradeReducerExports = makeUpgradeReducerExports(versions);\n\n sourceFile.addExportDeclarations([\n {\n namedExports: [upgradeManifestName],\n moduleSpecifier: \"./upgrade-manifest.js\",\n },\n {\n namedExports: [\"supportedVersions\", \"latestVersion\"],\n moduleSpecifier: \"./versions.js\",\n },\n ...upgradeReducerExports,\n ]);\n await formatSourceFileWithPrettier(sourceFile);\n}\n\nfunction makeUpgradeReducerExports(specVersions: number[]) {\n const exports: {\n namedExports: string[];\n moduleSpecifier: string;\n }[] = [];\n\n for (const version of specVersions) {\n if (version < 2) continue;\n const namedExports = [`v${version}`];\n const moduleSpecifier = `./v${version}.js`;\n exports.push({\n namedExports,\n moduleSpecifier,\n });\n }\n\n return exports;\n}\n","import type { DocumentModelFileMakerArgs } from \"@powerhousedao/codegen\";\nimport { directoryExists, fileExists } from \"@powerhousedao/shared/clis\";\nimport type { DocumentModelGlobalState } from \"@powerhousedao/shared/document-model\";\nimport { kebabCase } from \"change-case\";\nimport { createOrUpdateManifest } from \"file-builders\";\nimport { getDocumentModelVariableNames } from \"name-builders\";\nimport { copyFile, mkdir, readdir, writeFile } from \"node:fs/promises\";\nimport { join, relative } from \"node:path\";\nimport {\n capitalize,\n filter,\n forEach,\n isTruthy,\n last,\n map,\n pipe,\n prop,\n sort,\n subtract,\n unique,\n uniqueBy,\n} from \"remeda\";\nimport { documentModelsTemplate, upgradeManifestsTemplate } from \"templates\";\nimport { SyntaxKind, type Project } from \"ts-morph\";\nimport {\n ensureDirectoriesExist,\n 
formatSourceFileWithPrettier,\n getInitialStates,\n getOrCreateDirectory,\n getOrCreateSourceFile,\n getVariableDeclarationByTypeName,\n} from \"utils\";\nimport { generateDocumentModelZodSchemas } from \"../../codegen/graphql.js\";\nimport {\n makeDocumentModelDocumentTypeFile,\n makeDocumentModelGenActionsFile,\n makeDocumentModelGenControllerFile,\n makeDocumentModelGenCreatorsFile,\n makeDocumentModelGenDirOperationModulesFiles,\n makeDocumentModelGenDocumentModelFile,\n makeDocumentModelGenDocumentSchemaFile,\n makeDocumentModelGenIndexFile,\n makeDocumentModelGenPhFactoriesFile,\n makeDocumentModelGenReducerFile,\n makeDocumentModelGenTypesFile,\n makeDocumentModelGenUtilsFile,\n makeDocumentModelSchemaIndexFile,\n} from \"./gen-dir.js\";\nimport {\n makeDocumentModelHooksFile,\n makeDocumentModelModuleFile,\n makeDocumentModelRootActionsFile,\n makeDocumentModelUtilsFile,\n makeDocumentModelVersionIndexFile,\n} from \"./root-dir.js\";\nimport {\n makeDocumentModelSrcIndexFile,\n makeDocumentModelSrcUtilsFile,\n makeReducerOperationHandlersForModules,\n} from \"./src-dir.js\";\nimport {\n makeDocumentModelModulesOperationTestFiles,\n makeDocumentModelTestFile,\n} from \"./tests-dir.js\";\nimport {\n createOrUpdateUpgradeManifestFile,\n createOrUpdateVersionConstantsFile,\n makeUpgradeFile,\n makeUpgradesIndexFile,\n} from \"./upgrades-dir.js\";\n\n/** Generates a document model from the given `documentModelState`\n *\n * for each `specification` in the `documentModelState`\n */\nexport async function tsMorphGenerateDocumentModel(\n documentModelState: DocumentModelGlobalState,\n project: Project,\n) {\n const { name, id, specifications } = documentModelState;\n const { directory: documentModelsDir } = getOrCreateDirectory(\n project,\n \"document-models\",\n );\n const documentModelsDirPath = documentModelsDir.getPath();\n const projectDir = documentModelsDir.getParentOrThrow().getPath();\n const documentModelDirName = kebabCase(name);\n const 
documentModelDirPath = join(\n documentModelsDirPath,\n documentModelDirName,\n );\n const documentModelImportPath = join(\"document-models\", documentModelDirName);\n const upgradesDirPath = join(documentModelDirPath, \"upgrades\");\n const documentModelVariableNames = getDocumentModelVariableNames(name);\n await ensureDirectoriesExist(\n project,\n documentModelsDirPath,\n documentModelDirPath,\n upgradesDirPath,\n );\n\n const versions = pipe(\n specifications,\n map(prop(\"version\")),\n unique(),\n sort(subtract),\n );\n\n if (versions.length !== specifications.length) {\n throw new Error(\n \"Document model specifications array is misconfigured. Length does not match with spec versions.\",\n );\n }\n\n const latestVersion = Math.max(...versions);\n if (prop(last(specifications), \"version\") !== latestVersion) {\n throw new Error(\n \"Document model has incorrect version at the latest version index\",\n );\n }\n\n await writeDocumentModelStateJsonFile({\n documentModelState,\n documentModelDirName,\n documentModelDirPath,\n });\n\n for (const specification of specifications) {\n const { version } = specification;\n const versionDirName = `v${version}`;\n const versionDirPath = join(documentModelDirPath, versionDirName);\n const versionImportPath = join(documentModelImportPath, versionDirName);\n const srcDirPath = join(versionDirPath, \"src\");\n const testsDirPath = join(versionDirPath, \"tests\");\n const genDirPath = join(versionDirPath, \"gen\");\n const schemaDirPath = join(genDirPath, \"schema\");\n const { initialGlobalState, initialLocalState } = getInitialStates(\n specification.state,\n );\n const hasLocalSchema = specification.state.local.schema !== \"\";\n\n const fileMakerArgs: DocumentModelFileMakerArgs = {\n ...documentModelVariableNames,\n project,\n documentModelState,\n version,\n versions,\n latestVersion,\n specification,\n initialGlobalState,\n initialLocalState,\n hasLocalSchema,\n projectDir,\n documentModelsDirPath,\n 
documentModelDirPath,\n documentModelDirName,\n documentModelImportPath,\n versionDirPath,\n versionDirName,\n versionImportPath,\n genDirPath,\n schemaDirPath,\n srcDirPath,\n testsDirPath,\n upgradesDirPath,\n };\n\n // /{document-model-dir}/v{version}/\n await generateDocumentModelZodSchemas(fileMakerArgs);\n await makeDocumentModelVersionIndexFile(fileMakerArgs);\n await makeDocumentModelRootActionsFile(fileMakerArgs);\n await makeDocumentModelUtilsFile(fileMakerArgs);\n await makeDocumentModelHooksFile(fileMakerArgs);\n await makeDocumentModelModuleFile(fileMakerArgs);\n\n // /{document-model-dir}/v{version}/gen/\n await makeDocumentModelSchemaIndexFile(fileMakerArgs);\n await makeDocumentModelGenUtilsFile(fileMakerArgs);\n await makeDocumentModelGenTypesFile(fileMakerArgs);\n await makeDocumentModelGenCreatorsFile(fileMakerArgs);\n await makeDocumentModelGenActionsFile(fileMakerArgs);\n await makeDocumentModelGenDocumentSchemaFile(fileMakerArgs);\n await makeDocumentModelGenReducerFile(fileMakerArgs);\n await makeDocumentModelDocumentTypeFile(fileMakerArgs);\n await makeDocumentModelGenIndexFile(fileMakerArgs);\n await makeDocumentModelGenDocumentModelFile(fileMakerArgs);\n await makeDocumentModelGenPhFactoriesFile(fileMakerArgs);\n await makeDocumentModelGenControllerFile(fileMakerArgs);\n await makeDocumentModelGenDirOperationModulesFiles(fileMakerArgs);\n\n // /{document-model-dir}/v{version}/src/\n await makeDocumentModelSrcIndexFile(fileMakerArgs);\n await makeDocumentModelSrcUtilsFile(fileMakerArgs);\n await makeReducerOperationHandlersForModules(fileMakerArgs);\n\n // /{document-model-dir}/v{version}/tests\n await makeDocumentModelTestFile(fileMakerArgs);\n await makeDocumentModelModulesOperationTestFiles(fileMakerArgs);\n\n // /{document-model-dir}/v{version}/*\n await persistCustomFilesFromPreviousVersion(fileMakerArgs);\n\n // /{document-model-dir}/upgrades/v{version}.ts\n await makeUpgradeFile(fileMakerArgs);\n\n // 
/{document-model-dir}/upgrades/upgrade-manifest.ts\n await createOrUpdateUpgradeManifestFile(fileMakerArgs);\n }\n\n // /upgrades/versions.ts\n await createOrUpdateVersionConstantsFile({\n project,\n versions,\n latestVersion,\n upgradesDirPath,\n });\n\n // /{document-model-dir}/upgrades/index.ts\n await makeUpgradesIndexFile({\n ...documentModelVariableNames,\n project,\n versions,\n upgradesDirPath,\n });\n // /document-models/{document-model-dir}/index.ts\n await makeDocumentModelIndexFile({\n project,\n documentModelDirPath,\n latestVersion,\n });\n // /document-models/document-models.ts\n await makeDocumentModelsFile({ project, documentModelsDirPath });\n // /document-models/index.ts\n await makeDocumentModelsIndexFile({ project, documentModelsDirPath });\n // /document-models/upgrade-manifests.ts\n await makeUpgradeManifestsFile({ project, documentModelsDirPath });\n await createOrUpdateManifest(\n {\n documentModels: [\n {\n name,\n id,\n },\n ],\n },\n projectDir,\n );\n}\n\nasync function makeUpgradeManifestsFile(args: {\n project: Project;\n documentModelsDirPath: string;\n}) {\n const { project, documentModelsDirPath } = args;\n const sourceFile = project.createSourceFile(\n join(documentModelsDirPath, \"upgrade-manifests.ts\"),\n upgradeManifestsTemplate,\n { overwrite: true },\n );\n\n const upgradeManifestsArray = sourceFile\n .getVariableDeclarationOrThrow(\"upgradeManifests\")\n .getFirstDescendantByKindOrThrow(SyntaxKind.ArrayLiteralExpression);\n\n pipe(\n project.getSourceFiles(),\n // find the upgrade manifest files for each document model\n filter((sourceFile) => sourceFile.getBaseName() === \"upgrade-manifest.ts\"),\n // get the upgrade manifest objects\n map((sourceFile) =>\n getVariableDeclarationByTypeName(sourceFile, \"UpgradeManifest\"),\n ),\n filter(isTruthy),\n // get name and dir for adding to upgradeManifests array and making import specifier\n map((variableDeclaration) => ({\n name: variableDeclaration.getName(),\n // the 
upgrade-manifest.ts file lives in `document-models/{document-model-dir}/upgrades`\n documentModelDir: variableDeclaration\n .getSourceFile()\n .getDirectory()\n .getParentOrThrow()\n .getBaseName(),\n })),\n uniqueBy(prop(\"name\")),\n // make named imports and module specifier to add for each upgrade manifest\n map(({ name, documentModelDir }) => ({\n name,\n namedImports: [name],\n moduleSpecifier: join(\"document-models\", documentModelDir, \"upgrades\"),\n })),\n // add import of each upgrade manifest and add it to the upgradeManifests array\n forEach(({ name, namedImports, moduleSpecifier }) => {\n sourceFile.addImportDeclaration({ namedImports, moduleSpecifier });\n upgradeManifestsArray.addElement(name);\n }),\n );\n\n await formatSourceFileWithPrettier(sourceFile);\n}\n\nasync function makeDocumentModelsFile(args: {\n project: Project;\n documentModelsDirPath: string;\n}) {\n const { project, documentModelsDirPath } = args;\n const sourceFile = project.createSourceFile(\n join(documentModelsDirPath, \"document-models.ts\"),\n documentModelsTemplate,\n { overwrite: true },\n );\n\n const documentModelsArray = sourceFile\n .getVariableDeclarationOrThrow(\"documentModels\")\n .getFirstDescendantByKindOrThrow(SyntaxKind.ArrayLiteralExpression);\n\n pipe(\n project\n .getDirectoryOrThrow(documentModelsDirPath)\n .getDescendantSourceFiles(),\n filter((sourceFile) => sourceFile.getBaseName() === \"module.ts\"),\n uniqueBy((sourceFile) => sourceFile.getFilePath()),\n map((sourceFile) =>\n getVariableDeclarationByTypeName(sourceFile, \"DocumentModel\"),\n ),\n filter(isTruthy),\n map((variableDeclaration) => ({\n name: variableDeclaration.getName(),\n directory: variableDeclaration.getSourceFile().getDirectory(),\n })),\n map(({ name, directory }) => ({\n name,\n version: directory.getBaseName(),\n documentModelDir: directory.getParentOrThrow().getBaseName(),\n })),\n filter(({ version }) => /^v\\d+$/.test(version)),\n map(({ name, version, documentModelDir }) => 
({\n name: `${name}${capitalize(version)}`,\n // imports the document model with the version appended to the name\n namedImports: [`${name} as ${name}${capitalize(version)}`],\n moduleSpecifier: join(\"document-models\", documentModelDir, version),\n })),\n forEach(({ name, namedImports, moduleSpecifier }) => {\n sourceFile.addImportDeclaration({\n namedImports,\n moduleSpecifier,\n });\n documentModelsArray.addElement(name);\n }),\n );\n await formatSourceFileWithPrettier(sourceFile);\n}\n\nasync function makeDocumentModelsIndexFile(args: {\n project: Project;\n documentModelsDirPath: string;\n}) {\n const { project, documentModelsDirPath } = args;\n const sourceFile = project.createSourceFile(\n join(documentModelsDirPath, \"index.ts\"),\n \"\",\n { overwrite: true },\n );\n pipe(\n project\n .getDirectoryOrThrow(documentModelsDirPath)\n .getDescendantSourceFiles(),\n filter((sourceFile) => sourceFile.getBaseName() === \"module.ts\"),\n uniqueBy((sourceFile) => sourceFile.getFilePath()),\n map((sourceFile) =>\n getVariableDeclarationByTypeName(sourceFile, \"DocumentModel\"),\n ),\n filter(isTruthy),\n map((variableDeclaration) => ({\n name: variableDeclaration.getName(),\n directory: variableDeclaration.getSourceFile().getDirectory(),\n })),\n map(({ name, directory }) => ({\n name,\n version: directory.getBaseName(),\n documentModelDir: directory.getParentOrThrow().getBaseName(),\n })),\n filter(({ version }) => /^v\\d+$/.test(version)),\n map(({ name, version, documentModelDir }) => ({\n // exports the document model with the version appended to the name\n namedExports: [`${name} as ${name}${capitalize(version)}`],\n moduleSpecifier: `./${documentModelDir}/${version}/module.js`,\n })),\n forEach(({ namedExports, moduleSpecifier }) => {\n sourceFile.addExportDeclaration({\n namedExports,\n moduleSpecifier,\n });\n }),\n );\n await formatSourceFileWithPrettier(sourceFile);\n}\n\n/** Writes a json file derived from a `documentModelState` */\nasync function 
writeDocumentModelStateJsonFile({\n documentModelState,\n documentModelDirName,\n documentModelDirPath,\n}: {\n documentModelState: DocumentModelGlobalState;\n documentModelDirPath: string;\n documentModelDirName: string;\n}) {\n const filePath = join(documentModelDirPath, `${documentModelDirName}.json`);\n const documentModelStateJson = JSON.stringify(documentModelState, null, 2);\n await writeFile(filePath, documentModelStateJson);\n}\n\nasync function makeDocumentModelIndexFile(args: {\n project: Project;\n documentModelDirPath: string;\n latestVersion: number;\n}) {\n const { project, documentModelDirPath, latestVersion } = args;\n\n const filePath = join(documentModelDirPath, \"index.ts\");\n\n const { sourceFile } = getOrCreateSourceFile(project, filePath);\n\n sourceFile.replaceWithText(\"\");\n sourceFile.addExportDeclarations([\n { moduleSpecifier: `./v${latestVersion}/index.js` },\n { moduleSpecifier: `./upgrades/index.js` },\n ]);\n\n await formatSourceFileWithPrettier(sourceFile);\n}\n\nasync function persistCustomFilesFromPreviousVersion(\n args: DocumentModelFileMakerArgs,\n) {\n const { version, documentModelDirPath } = args;\n const previousVersion = version - 1;\n if (previousVersion <= 1) return;\n\n const currentVersionDirName = `v${version}`;\n const previousVersionDirName = `v${previousVersion}`;\n\n if (currentVersionDirName === previousVersionDirName) return;\n\n const previousVersionDirPath = join(\n documentModelDirPath,\n previousVersionDirName,\n );\n const currentVersionDirPath = join(\n documentModelDirPath,\n currentVersionDirName,\n );\n const previousVersionDirExists = await directoryExists(\n previousVersionDirPath,\n );\n\n if (!previousVersionDirExists) return;\n\n const previousVersionDirContents = await readdir(previousVersionDirPath, {\n withFileTypes: true,\n recursive: true,\n });\n\n const previousVersionFiles = previousVersionDirContents\n .filter((dirEnt) => dirEnt.isFile())\n .map(({ name, parentPath }) => ({\n name,\n 
parentPath,\n relativePath: relative(previousVersionDirPath, parentPath),\n }));\n\n for (const { name, relativePath } of previousVersionFiles) {\n const filePathInCurrentVersionDir = join(\n currentVersionDirPath,\n relativePath,\n name,\n );\n const filePathInPreviousVersionDir = join(\n previousVersionDirPath,\n relativePath,\n name,\n );\n const existsInPreviousVersionDir = await fileExists(\n filePathInPreviousVersionDir,\n );\n const existsInCurrentVersionDir = await fileExists(\n filePathInCurrentVersionDir,\n );\n if (existsInPreviousVersionDir && !existsInCurrentVersionDir) {\n console.log(\n `Persisting file \"${join(relativePath, name)}\" from previous version directory.`,\n );\n await mkdir(join(currentVersionDirPath, relativePath), {\n recursive: true,\n });\n await copyFile(filePathInPreviousVersionDir, filePathInCurrentVersionDir);\n }\n }\n}\n","import type { OperationSpecification } from \"@powerhousedao/shared\";\nimport { isNonNullish } from \"remeda\";\n\nexport function operationHasInput(operation: OperationSpecification) {\n return isNonNullish(operation.schema);\n}\n\nexport function operationHasEmptyInput(operation: OperationSpecification) {\n return (\n operation.schema?.includes(\"_empty\") &&\n !operation.schema.replace(/_empty:\\s*Boolean/, \"\").match(/\\w+:\\s*\\w+/)\n );\n}\n\nexport function operationHasAttachment(operation: OperationSpecification) {\n return operation.schema?.includes(\": Attachment\");\n}\n","import path from \"node:path\";\nimport type { Project } from \"ts-morph\";\n\ntype MakeModuleIndexFileArgs = {\n /** The project to make the legacy index file for */\n project: Project;\n /** The directory containing the module.ts files to generate from */\n modulesDirPath: string;\n modules: {\n unversionedName: string;\n versionedName: string;\n moduleSpecifier: string;\n }[];\n};\n\n/**\n * Makes a index.ts file for the modules file which exports the modules as individual exports instead of an array of named exports.\n 
*/\nexport function makeModulesIndexFile({\n project,\n modulesDirPath,\n modules,\n}: MakeModuleIndexFileArgs) {\n const indexSourceFilePath = path.join(modulesDirPath, \"index.ts\");\n\n // get the source file for the index.ts file if it exists\n let indexSourceFile = project.getSourceFile(indexSourceFilePath);\n // if the index.ts file doesn't exist, create it\n if (!indexSourceFile) {\n indexSourceFile = project.createSourceFile(indexSourceFilePath, \"\");\n } else {\n indexSourceFile.replaceWithText(\"\");\n }\n\n indexSourceFile.addExportDeclarations(\n modules.map(({ versionedName, unversionedName, moduleSpecifier }) => ({\n namedExports: [\n versionedName\n ? `${unversionedName} as ${versionedName}`\n : unversionedName,\n ],\n moduleSpecifier,\n })),\n );\n}\n","import type {\n ConfigEntry,\n Manifest,\n PowerhouseModule,\n} from \"@powerhousedao/shared\";\nimport { fileExists } from \"@powerhousedao/shared/clis\";\nimport { ManifestSchema } from \"@powerhousedao/shared/document-model\";\nimport { defaultManifest } from \"file-builders\";\nimport { loadJsonFile } from \"load-json-file\";\nimport { join } from \"path\";\nimport {\n concat,\n filter,\n isIncludedIn,\n map,\n merge,\n pipe,\n prop,\n uniqueBy,\n} from \"remeda\";\nimport { writeJsonFile } from \"write-json-file\";\n\nexport async function getOrCreateManifestFile(\n manifestPath: string,\n): Promise<Manifest> {\n const hasManifestFile = await fileExists(manifestPath);\n if (!hasManifestFile) {\n await writeJsonFile(manifestPath, defaultManifest);\n }\n const manifestFile = await loadJsonFile(manifestPath);\n return ManifestSchema.parse(manifestFile);\n}\n\nfunction makeUpdatedModulesList(\n oldModules: PowerhouseModule[] = [],\n newModules: PowerhouseModule[] = [],\n): PowerhouseModule[] {\n return pipe(concat(oldModules, newModules), uniqueBy(prop(\"id\")));\n}\n/* Updates the config field of powerhouse.manifest.json assuming unique `name` fields in the `ConfigEntry` objects */\nfunction 
makeUpdatedConfig(\n oldConfig: ConfigEntry[] = [],\n newConfig: ConfigEntry[] = [],\n) {\n return pipe(\n oldConfig,\n filter(({ name }) => !isIncludedIn(name, map(newConfig, prop(\"name\")))),\n concat(newConfig),\n uniqueBy(prop(\"name\")),\n );\n}\n\n/* Creates a powerhouse.manifest.json file, or updates an existing one with the data provided */\nexport async function createOrUpdateManifest(\n manifestData: Partial<Manifest>,\n projectDir: string,\n) {\n const manifestPath = join(projectDir, \"powerhouse.manifest.json\");\n const existingManifest = await getOrCreateManifestFile(manifestPath);\n\n const updatedManifest: Manifest = {\n ...existingManifest,\n ...manifestData,\n publisher: merge(existingManifest.publisher, manifestData.publisher),\n documentModels: makeUpdatedModulesList(\n existingManifest.documentModels,\n manifestData.documentModels,\n ),\n editors: makeUpdatedModulesList(\n existingManifest.editors,\n manifestData.editors,\n ),\n apps: makeUpdatedModulesList(existingManifest.apps, manifestData.apps),\n processors: makeUpdatedModulesList(\n existingManifest.processors,\n manifestData.processors,\n ),\n subgraphs: makeUpdatedModulesList(\n existingManifest.subgraphs,\n manifestData.subgraphs,\n ),\n config: makeUpdatedConfig(existingManifest.config, manifestData.config),\n };\n await writeJsonFile(manifestPath, updatedManifest);\n return updatedManifest;\n}\n","import path from \"path\";\nimport {\n analyticsFactoryTemplate,\n analyticsIndexTemplate,\n analyticsProcessorTemplate,\n} from \"templates\";\nimport type { Project } from \"ts-morph\";\nimport { formatSourceFileWithPrettier, getOrCreateSourceFile } from \"utils\";\nimport type { GenerateProcessorArgs } from \"./types.js\";\n\nexport async function tsMorphGenerateAnalyticsProcessor(\n args: GenerateProcessorArgs,\n) {\n const { project, documentTypes, pascalCaseName, dirPath, camelCaseName } =\n args;\n\n await makeIndexFile({\n project,\n pascalCaseName,\n dirPath,\n });\n\n await 
makeProcessorFile({\n project,\n pascalCaseName,\n dirPath,\n });\n\n await makeFactoryFile({\n project,\n pascalCaseName,\n camelCaseName,\n dirPath,\n documentTypes,\n });\n}\n\nasync function makeIndexFile(v: {\n project: Project;\n pascalCaseName: string;\n dirPath: string;\n}) {\n const template = analyticsIndexTemplate;\n const { alreadyExists, sourceFile } = getOrCreateSourceFile(\n v.project,\n path.join(v.dirPath, \"index.ts\"),\n );\n if (alreadyExists) return;\n sourceFile.replaceWithText(template);\n await formatSourceFileWithPrettier(sourceFile);\n}\n\nasync function makeProcessorFile(v: {\n project: Project;\n pascalCaseName: string;\n dirPath: string;\n}) {\n const template = analyticsProcessorTemplate(v);\n const { alreadyExists, sourceFile } = getOrCreateSourceFile(\n v.project,\n path.join(v.dirPath, \"processor.ts\"),\n );\n if (alreadyExists) return;\n sourceFile.replaceWithText(template);\n await formatSourceFileWithPrettier(sourceFile);\n}\n\nasync function makeFactoryFile(v: {\n project: Project;\n pascalCaseName: string;\n camelCaseName: string;\n dirPath: string;\n documentTypes: string[];\n}) {\n const template = analyticsFactoryTemplate(v);\n const { alreadyExists, sourceFile } = getOrCreateSourceFile(\n v.project,\n path.join(v.dirPath, \"factory.ts\"),\n );\n if (alreadyExists) return;\n sourceFile.replaceWithText(template);\n await formatSourceFileWithPrettier(sourceFile);\n}\n","import path from \"path\";\nimport {\n relationalDbFactoryTemplate,\n relationalDbIndexTemplate,\n relationalDbMigrationsTemplate,\n relationalDbProcessorTemplate,\n relationalDbSchemaTemplate,\n} from \"templates\";\nimport type { Project } from \"ts-morph\";\nimport { formatSourceFileWithPrettier, getOrCreateSourceFile } from \"utils\";\nimport type { GenerateProcessorArgs } from \"./types.js\";\n\nexport async function tsMorphGenerateRelationalDbProcessor(\n args: GenerateProcessorArgs,\n) {\n const { project, documentTypes, camelCaseName, pascalCaseName, 
dirPath } =\n args;\n\n await makeIndexFile({\n project,\n pascalCaseName,\n dirPath,\n });\n\n await makeProcessorFile({\n project,\n pascalCaseName,\n dirPath,\n });\n\n await makeFactoryFile({\n project,\n pascalCaseName,\n camelCaseName,\n dirPath,\n documentTypes,\n });\n\n await makeMigrationsFile({ project, dirPath });\n\n await makeSchemaFile({ project, dirPath });\n}\n\nasync function makeIndexFile(v: {\n project: Project;\n pascalCaseName: string;\n dirPath: string;\n}) {\n const template = relationalDbIndexTemplate;\n const { alreadyExists, sourceFile } = getOrCreateSourceFile(\n v.project,\n path.join(v.dirPath, \"index.ts\"),\n );\n if (alreadyExists) return;\n sourceFile.replaceWithText(template);\n await formatSourceFileWithPrettier(sourceFile);\n}\n\nasync function makeProcessorFile(v: {\n project: Project;\n pascalCaseName: string;\n dirPath: string;\n}) {\n const template = relationalDbProcessorTemplate(v);\n const { alreadyExists, sourceFile } = getOrCreateSourceFile(\n v.project,\n path.join(v.dirPath, \"processor.ts\"),\n );\n if (alreadyExists) return;\n sourceFile.replaceWithText(template);\n await formatSourceFileWithPrettier(sourceFile);\n}\n\nasync function makeFactoryFile(v: {\n project: Project;\n pascalCaseName: string;\n camelCaseName: string;\n dirPath: string;\n documentTypes: string[];\n}) {\n const template = relationalDbFactoryTemplate(v);\n const { alreadyExists, sourceFile } = getOrCreateSourceFile(\n v.project,\n path.join(v.dirPath, \"factory.ts\"),\n );\n if (alreadyExists) return;\n sourceFile.replaceWithText(template);\n await formatSourceFileWithPrettier(sourceFile);\n}\n\nasync function makeSchemaFile(v: { project: Project; dirPath: string }) {\n const template = relationalDbSchemaTemplate();\n const { alreadyExists, sourceFile } = getOrCreateSourceFile(\n v.project,\n path.join(v.dirPath, \"schema.ts\"),\n );\n if (alreadyExists) return;\n sourceFile.replaceWithText(template);\n await 
formatSourceFileWithPrettier(sourceFile);\n}\n\nasync function makeMigrationsFile(v: { project: Project; dirPath: string }) {\n const template = relationalDbMigrationsTemplate();\n const { alreadyExists, sourceFile } = getOrCreateSourceFile(\n v.project,\n path.join(v.dirPath, \"migrations.ts\"),\n );\n if (alreadyExists) return;\n sourceFile.replaceWithText(template);\n await formatSourceFileWithPrettier(sourceFile);\n}\n","import type {\n ProcessorApp,\n ProcessorApps,\n} from \"@powerhousedao/shared/processors\";\nimport { camelCase, kebabCase, pascalCase } from \"change-case\";\nimport { createOrUpdateManifest } from \"file-builders\";\nimport path from \"path\";\nimport { factoryBuildersTemplate } from \"templates\";\nimport type { SourceFile } from \"ts-morph\";\nimport { ts, type Project } from \"ts-morph\";\nimport {\n ensureDirectoriesExist,\n formatSourceFileWithPrettier,\n getOrCreateDirectory,\n getOrCreateSourceFile,\n} from \"utils\";\nimport { tsMorphGenerateAnalyticsProcessor } from \"./analytics.js\";\nimport { tsMorphGenerateRelationalDbProcessor } from \"./relational-db.js\";\n\nexport async function tsMorphGenerateProcessor(args: {\n project: Project;\n processorName: string;\n documentTypes: string[];\n processorType: \"relationalDb\" | \"analytics\";\n processorApps: ProcessorApps;\n}) {\n const {\n project,\n processorName,\n documentTypes,\n processorType,\n processorApps,\n } = args;\n const kebabCaseName = kebabCase(processorName);\n const camelCaseName = camelCase(processorName);\n const pascalCaseName = pascalCase(processorName);\n const { directory: processorsDir } = getOrCreateDirectory(\n project,\n \"processors\",\n );\n const projectDir = processorsDir.getParentOrThrow().getPath();\n const processorsDirPath = processorsDir.getPath();\n const dirPath = path.join(processorsDirPath, kebabCaseName);\n await ensureDirectoriesExist(project, processorsDirPath, dirPath);\n\n if (processorType === \"analytics\") {\n await 
tsMorphGenerateAnalyticsProcessor({\n processorName,\n documentTypes,\n camelCaseName,\n dirPath,\n kebabCaseName,\n pascalCaseName,\n processorsDirPath,\n project,\n });\n } else {\n await tsMorphGenerateRelationalDbProcessor({\n processorName,\n documentTypes,\n camelCaseName,\n dirPath,\n kebabCaseName,\n pascalCaseName,\n processorsDirPath,\n project,\n });\n }\n\n for (const processorApp of processorApps) {\n await updateFactoryBuildersFile({\n processorsDirPath,\n processorApp,\n project,\n camelCaseName,\n kebabCaseName,\n });\n }\n await createOrUpdateManifest(\n {\n processors: [{ name: processorName, id: kebabCaseName }],\n },\n projectDir,\n );\n}\n\nasync function updateFactoryBuildersFile(v: {\n project: Project;\n processorsDirPath: string;\n processorApp: ProcessorApp;\n camelCaseName: string;\n kebabCaseName: string;\n}) {\n const {\n project,\n processorsDirPath,\n processorApp,\n camelCaseName,\n kebabCaseName,\n } = v;\n const template = factoryBuildersTemplate;\n const filePath = path.join(processorsDirPath, `${processorApp}.ts`);\n const { alreadyExists, sourceFile } = getOrCreateSourceFile(\n project,\n filePath,\n );\n if (!alreadyExists) {\n sourceFile.replaceWithText(template);\n }\n const name = `${camelCaseName}FactoryBuilder`;\n const moduleSpecifier = path.join(\"processors\", kebabCaseName);\n\n const factoriesArrayName = \"processorFactoryBuilders\";\n\n let factoryBuildersArray = getFactoryBuildersArray(\n sourceFile,\n factoriesArrayName,\n );\n\n if (!factoryBuildersArray) {\n sourceFile.replaceWithText(template);\n factoryBuildersArray = getFactoryBuildersArray(\n sourceFile,\n factoriesArrayName,\n );\n }\n\n if (!factoryBuildersArray) {\n throw new Error(\n `Could not get factory builders array in file ${processorApp}.ts`,\n );\n }\n\n const importDeclaration = sourceFile\n .getImportDeclarations()\n .flatMap((importDeclaration) =>\n importDeclaration.getNamedImports().map((n) => n.getText()),\n )\n .find((n) => n === name);\n\n 
if (!importDeclaration) {\n sourceFile.addImportDeclaration({\n namedImports: [name],\n moduleSpecifier,\n });\n }\n\n const arrayElements = factoryBuildersArray\n .getElements()\n .map((e) => e.getText());\n\n if (!arrayElements.includes(name)) {\n factoryBuildersArray.addElement(name);\n }\n\n await formatSourceFileWithPrettier(sourceFile);\n}\n\nfunction getFactoryBuildersArray(sourceFile: SourceFile, name: string) {\n return sourceFile\n .getDescendantsOfKind(ts.SyntaxKind.VariableStatement)\n .flatMap((d) => d.getDescendantsOfKind(ts.SyntaxKind.VariableDeclaration))\n .find((d) => d.getName() === name)\n ?.getDescendantsOfKind(ts.SyntaxKind.ArrayLiteralExpression)\n .at(0);\n}\n","import { camelCase, kebabCase, pascalCase } from \"change-case\";\nimport { createOrUpdateManifest } from \"file-builders\";\nimport path from \"path\";\nimport { filter, isTruthy, map, pipe, uniqueBy } from \"remeda\";\nimport {\n customSubgraphResolversTemplate,\n customSubgraphSchemaTemplate,\n subgraphIndexFileTemplate,\n subgraphLibFileTemplate,\n} from \"templates\";\nimport type { Project } from \"ts-morph\";\nimport {\n ensureDirectoriesExist,\n formatSourceFileWithPrettier,\n getOrCreateDirectory,\n getOrCreateSourceFile,\n} from \"utils\";\n\nexport async function tsMorphGenerateSubgraph(args: {\n subgraphName: string;\n project: Project;\n}): Promise<void> {\n const { subgraphName, project } = args;\n const kebabCaseName = kebabCase(subgraphName);\n const pascalCaseName = pascalCase(subgraphName);\n const camelCaseName = camelCase(subgraphName);\n const { directory: subgraphsDir } = getOrCreateDirectory(\n project,\n \"subgraphs\",\n );\n const subgraphsDirPath = subgraphsDir.getPath();\n const projectDir = subgraphsDir.getParentOrThrow().getPath();\n const subgraphDir = path.join(subgraphsDirPath, kebabCaseName);\n await ensureDirectoriesExist(project, subgraphsDirPath, subgraphDir);\n\n // Always generate base subgraph files (unless_exists)\n await 
makeBaseSubgraphIndexFile(project, subgraphDir, {\n pascalCaseName,\n kebabCaseName,\n });\n await makeBaseSubgraphLibFile(project, subgraphDir);\n\n // Generate custom subgraph scaffolds (unless_exists)\n await makeCustomSubgraphFiles(project, subgraphDir, {\n pascalCaseName,\n camelCaseName,\n });\n\n await makeSubgraphsIndexFile({ project, subgraphsDir: subgraphsDirPath });\n await createOrUpdateManifest(\n {\n subgraphs: [\n {\n name: subgraphName,\n id: kebabCaseName,\n },\n ],\n },\n projectDir,\n );\n}\n\nasync function makeBaseSubgraphIndexFile(\n project: Project,\n dirPath: string,\n v: { pascalCaseName: string; kebabCaseName: string },\n) {\n const filePath = path.join(dirPath, \"index.ts\");\n const { alreadyExists, sourceFile } = getOrCreateSourceFile(\n project,\n filePath,\n );\n if (alreadyExists) return;\n sourceFile.replaceWithText(subgraphIndexFileTemplate(v));\n await formatSourceFileWithPrettier(sourceFile);\n}\n\nasync function makeBaseSubgraphLibFile(project: Project, dirPath: string) {\n const filePath = path.join(dirPath, \"lib.ts\");\n const { alreadyExists, sourceFile } = getOrCreateSourceFile(\n project,\n filePath,\n );\n if (alreadyExists) return;\n sourceFile.replaceWithText(subgraphLibFileTemplate());\n await formatSourceFileWithPrettier(sourceFile);\n}\n\nasync function makeCustomSubgraphFiles(\n project: Project,\n dirPath: string,\n v: { pascalCaseName: string; camelCaseName: string },\n) {\n // Schema — skip prettier, contains gql tagged template literal\n const schemaPath = path.join(dirPath, \"schema.ts\");\n const schema = getOrCreateSourceFile(project, schemaPath);\n if (!schema.alreadyExists) {\n schema.sourceFile.replaceWithText(customSubgraphSchemaTemplate(v));\n }\n\n // Resolvers\n const resolversPath = path.join(dirPath, \"resolvers.ts\");\n const resolvers = getOrCreateSourceFile(project, resolversPath);\n if (!resolvers.alreadyExists) {\n resolvers.sourceFile.replaceWithText(customSubgraphResolversTemplate(v));\n 
await formatSourceFileWithPrettier(resolvers.sourceFile);\n }\n}\n\nexport async function makeSubgraphsIndexFile(args: {\n project: Project;\n subgraphsDir: string;\n}) {\n const { project, subgraphsDir } = args;\n const { sourceFile } = getOrCreateSourceFile(\n project,\n path.join(subgraphsDir, \"index.ts\"),\n );\n const existingExportNames = pipe(\n sourceFile.getExportDeclarations(),\n map((exportDeclaration) =>\n exportDeclaration.getNamespaceExport()?.getName(),\n ),\n filter(isTruthy),\n );\n\n const exportDeclarations = pipe(\n project.getDirectoryOrThrow(subgraphsDir).getDescendantSourceFiles(),\n filter((sourceFile) => sourceFile.getBaseName() === \"index.ts\"),\n uniqueBy((sourceFile) => sourceFile.getFilePath()),\n map((sourceFile) =>\n sourceFile\n .getClasses()\n .find((c) => c.getBaseClass()?.getText().includes(\"BaseSubgraph\")),\n ),\n filter(isTruthy),\n map((classDeclaration) => ({\n name: classDeclaration.getNameOrThrow(),\n subgraphDir: classDeclaration\n .getSourceFile()\n .getDirectory()\n .getBaseName(),\n })),\n filter(({ name }) => !existingExportNames.includes(name)),\n map(({ name, subgraphDir }) => ({\n namespaceExport: name,\n moduleSpecifier: `./${subgraphDir}/index.js`,\n })),\n );\n sourceFile.addExportDeclarations(exportDeclarations);\n await 
formatSourceFileWithPrettier(sourceFile);\n}\n"],"x_google_ignoreList":[0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20],"mappings":";;;;;;;;;;;;;;;;;;AAgCA,SAAgB,MAAM,GAAG;AACvB,QAAO,MAAA,MAAmB,MAAA;;AAE5B,SAAgB,aAAa,GAAG;AAC9B,QAAO,MAAA,KAAa,MAAA;;AAEtB,SAAgB,kBAAkB,GAAG;AACnC,QAAO,aAAa,EAAE,IAAI,MAAM,EAAE;;AAEpC,SAAgB,gBAAgB,GAAG;AACjC,QAAO,MAAA,MAAe,MAAA,MAA6B,MAAA,MAA8B,MAAA,OAA4B,MAAA;;;;ACrC/G,MAAM,aAAa;AACnB,SAAS,kBAAkB,MAAM;AAC/B,KAAI,SAAS,KAAM,QAAO;CAC1B,IAAI;CACJ,IAAI,SAAS;CACb,MAAM,MAAM,KAAK;CACjB,MAAM,MAAM;AAEZ,MAAI,IAAI,MAAM,GAAG,MAAM,KAAK,OAAM;AAChC,SAAO,IAAI,QAAQ,KAAK,OAAO,IAAI,CAAC;AAEpC,MAAI,OAAO,GAAI;AAEf,MAAI,OAAO,EAAG,QAAO;AACrB,YAAU;;AAGZ,QAAO,SAAS,MAAM;;AAExB,SAAS,oBAAoB,MAAM;CAEjC,MAAM,QAAQ,KAAK,QAAQ,YAAY,GAAG;CAC1C,MAAM,MAAM,MAAM;CAClB,MAAM,MAAM;CAEZ,MAAM,SAAS,EAAE;CACjB,IAAI,OAAO;AACX,MAAI,IAAI,MAAM,GAAG,MAAM,KAAK,OAAM;AAChC,MAAI,MAAM,MAAM,KAAK,KAAK;AACxB,UAAO,KAAK,QAAQ,KAAK,IAAK;AAC9B,UAAO,KAAK,QAAQ,IAAI,IAAK;AAC7B,UAAO,KAAK,OAAO,IAAK;;AAE1B,SAAO,QAAQ,IAAI,IAAI,QAAQ,MAAM,OAAO,IAAI,CAAC;;CAGnD,MAAM,WAAW,MAAM,IAAI;AAC3B,KAAI,aAAa,GAAG;AAClB,SAAO,KAAK,QAAQ,KAAK,IAAK;AAC9B,SAAO,KAAK,QAAQ,IAAI,IAAK;AAC7B,SAAO,KAAK,OAAO,IAAK;YACf,aAAa,IAAI;AAC1B,SAAO,KAAK,QAAQ,KAAK,IAAK;AAC9B,SAAO,KAAK,QAAQ,IAAI,IAAK;YACpB,aAAa,GACtB,QAAO,KAAK,QAAQ,IAAI,IAAK;AAE/B,QAAO,IAAI,WAAW,OAAO;;AAE/B,SAAS,oBAAoB,QAAQ;CACnC,MAAM,MAAM,OAAO;CACnB,MAAM,MAAM;CAEZ,IAAI,SAAS;CACb,IAAI,OAAO;AACX,MAAI,IAAI,MAAM,GAAG,MAAM,KAAK,OAAM;AAChC,MAAI,MAAM,MAAM,KAAK,KAAK;AACxB,aAAU,IAAI,QAAQ,KAAK;AAC3B,aAAU,IAAI,QAAQ,KAAK;AAC3B,aAAU,IAAI,QAAQ,IAAI;AAC1B,aAAU,IAAI,OAAO;;AAEvB,UAAQ,QAAQ,KAAK,OAAO;;CAG9B,MAAM,OAAO,MAAM;AACnB,KAAI,SAAS,GAAG;AACd,YAAU,IAAI,QAAQ,KAAK;AAC3B,YAAU,IAAI,QAAQ,KAAK;AAC3B,YAAU,IAAI,QAAQ,IAAI;AAC1B,YAAU,IAAI,OAAO;YACZ,SAAS,GAAG;AACrB,YAAU,IAAI,QAAQ,KAAK;AAC3B,YAAU,IAAI,QAAQ,IAAI;AAC1B,YAAU,IAAI,QAAQ,IAAI;AAC1B,YAAU,IAAI;YACL,SAAS,GAAG;AACrB,YAAU,IAAI,QAAQ,IAAI;AAC1B,YAAU,IAAI,QAAQ,IAAI;AAC1B,YAAU,IAAI;AACd,YAAU,IAAI;;AAEhB,QAAO;;AAET,SAAS,SAAS,KAAK;AACrB,QAAO,eAAe;;
AAExB,MAAa,SAAS;CACpB,KAAK;CACL,WAAW;CACX,MAAM;CACN,WAAW;CACX,WAAW;CACX,SAAS;CACV;;;AC/FD,MAAM,qBAAqB;CACzB;CACA;CACA;CACD;AACD,MAAM,sBAAsB;CAC1B;CACA;CACA;CACD;AACD,MAAM,gBAAgB,CACpB,GAAG,oBACH,GAAG,oBACJ;AACD,MAAa,OAAO;CAClB,KAAK;CACL,MAAM;CACN,cAAc;CACd,YAAY,UAAQ,OAAO,UAAU,aAAa,iBAAiB;CACnE,YAAY,SAAO,mBAAmB,SAAS,KAAK;CACpD,UAAU,SAAO,cAAc,SAAS,KAAK;CAC7C,WAAW;EAET,YAAY,WAAS;AAEnB,WADc,kBAAkB,UAAU,OAAO,SAAS,GAAG,UAC9C,SAAS;;EAG1B,YAAY,WAAS;AAEnB,WADc,kBAAkB,UAAU,OAAO,SAAS,GAAG,UAC9C,SAAS;;EAG1B,YAAY,WAAS;AAEnB,WADc,kBAAkB,UAAU,OAAO,SAAS,GAAG,UAC9C,SAAS;;EAE3B;CACF;;;ACtCD,SAAgB,SAAS,OAAO;AAC9B,QAAO,UAAU,QAAQ,OAAO,UAAU;;AAE5C,SAAgB,eAAe,GAAG;AAChC,QAAO,MAAM,KAAK,OAAO,sBAAsB,IAAI;;AAErD,SAAgB,cAAc,QAAQ;AACpC,QAAO,OAAO,UAAU,SAAS,KAAK,OAAO,KAAK;;;;ACNpD,MAAM,qCAAqB,IAAI,OAC/B,6IAIwB;AACxB,SAAS,iBAAiB,MAAM;AAC9B,KAAI,CAAC,mBAAmB,KAAK,KAAK,IAElC,KAAK,KAAK,SAAS,OAAO,IACxB,QAAO;AAET,QAAO;;AAET,SAAS,mBAAmB,MAAM;CAChC,IAAI,QAAQ,KAAK,QAAQ,MAAM,GAAG,CAAC,aAAa;CAChD,MAAM,OAAO,MAAM,OAAO,MAAM,KAAK;AACrC,KAAI,MAAM,MAAM,KAAK,SAAS,MAAM,GAAG,CACrC,SAAQ,MAAM,MAAM,EAAE;AAExB,KAAI,UAAU,OACZ,QAAO,SAAS,IAAI,OAAO,oBAAoB,OAAO;AAExD,KAAI,UAAU,OACZ,QAAO;AAET,QAAO,OAAO,WAAW,MAAM;;AAEjC,MAAM,yBAAyB;AAC/B,SAAS,mBACT,QAAQ,OAAO;CACb,MAAM,QAAQ,kBAAkB,SAAS,OAAO,SAAS,GAAG;AAC5D,KAAI,MAAM,MAAM,CACd,SAAO,OAAP;EACE,KAAK,YACH,QAAO;EACT,KAAK,YACH,QAAO;EACT,KAAK,YACH,QAAO;;UAEF,OAAO,sBAAsB,MACtC,SAAO,OAAP;EACE,KAAK,YACH,QAAO;EACT,KAAK,YACH,QAAO;EACT,KAAK,YACH,QAAO;;UAEF,OAAO,sBAAsB,MACtC,SAAO,OAAP;EACE,KAAK,YACH,QAAO;EACT,KAAK,YACH,QAAO;EACT,KAAK,YACH,QAAO;;UAEF,eAAe,MAAM,CAC9B,QAAO;CAET,MAAM,MAAM,MAAM,SAAS,GAAG;AAG9B,QAAO,uBAAuB,KAAK,IAAI,GAAG,IAAI,QAAQ,KAAK,KAAK,GAAG;;AAErE,SAAS,QAAQ,QAAQ;AACvB,KAAI,kBAAkB,OAAQ,UAAS,OAAO,SAAS;AACvD,QAAO,OAAO,WAAW,aAAa,SAAS,MAAM,KAAK,eAAe,OAAO;;AAElF,MAAa,QAAQ;CACnB,KAAK;CACL,WAAW;CACX,cAAc;CACd,MAAM;CACN,WAAW;CACX,WAAW;CACX,SAAS;CACV;;;AC/ED,SAAS,kBAAkB,GAAG,OAAO,OAAO;AAC1C,QAAO,SAAS,KAAK,KAAK;;AAE5B,SAAS,UAAU,GAAG;AACpB,QAAO,kBAAkB,GAAG,IAAM,GAAK,IACvC,kBAAkB,GAAG,IAAM,
GAAK,IAChC,kBAAkB,GAAG,IAAM,IAAK;;AAGlC,SAAS,UAAU,GAAG;AACpB,QAAO,kBAAkB,GAAG,IAAM,GAAK;;AAEzC,SAAS,UAAU,GAAG;AACpB,QAAO,kBAAkB,GAAG,IAAM,GAAK;;AAEzC,SAAS,mBAAmB,MAAM;CAChC,MAAM,MAAM,KAAK;CACjB,IAAI,QAAQ;CACZ,IAAI,YAAY;AAChB,KAAI,CAAC,IAAK,QAAO;CACjB,IAAI,KAAK,KAAK;AAEd,KAAI,OAAO,OAAO,OAAO,IACvB,MAAK,KAAK,EAAE;AAEd,KAAI,OAAO,KAAK;AAEd,MAAI,QAAQ,MAAM,IAAK,QAAO;AAC9B,OAAK,KAAK,EAAE;AAEZ,MAAI,OAAO,KAAK;AAEd;AACA,UAAM,QAAQ,KAAK,SAAQ;AACzB,SAAK,KAAK;AACV,QAAI,OAAO,IAAK;AAChB,QAAI,OAAO,OAAO,OAAO,IAAK,QAAO;AACrC,gBAAY;;AAEd,UAAO,aAAa,OAAO;;AAE7B,MAAI,OAAO,KAAK;AAEd;AACA,UAAM,QAAQ,KAAK,SAAQ;AACzB,SAAK,KAAK;AACV,QAAI,OAAO,IAAK;AAChB,QAAI,CAAC,UAAU,KAAK,WAAW,MAAM,CAAC,CAAE,QAAO;AAC/C,gBAAY;;AAEd,UAAO,aAAa,OAAO;;AAG7B,SAAM,QAAQ,KAAK,SAAQ;AACzB,QAAK,KAAK;AACV,OAAI,OAAO,IAAK;AAChB,OAAI,CAAC,UAAU,KAAK,WAAW,MAAM,CAAC,CAAE,QAAO;AAC/C,eAAY;;AAEd,SAAO,aAAa,OAAO;;AAI7B,KAAI,OAAO,IAAK,QAAO;AACvB,QAAM,QAAQ,KAAK,SAAQ;AACzB,OAAK,KAAK;AACV,MAAI,OAAO,IAAK;AAChB,MAAI,CAAC,UAAU,KAAK,WAAW,MAAM,CAAC,CACpC,QAAO;AAET,cAAY;;AAGd,KAAI,CAAC,aAAa,OAAO,IAAK,QAAO;AAErC,QAAO,oBAAoB,KAAK,KAAK,MAAM,MAAM,CAAC;;AAEpD,SAAS,qBAAqB,MAAM;CAClC,IAAI,QAAQ;AACZ,KAAI,MAAM,SAAS,IAAI,CACrB,SAAQ,MAAM,QAAQ,MAAM,GAAG;CAEjC,IAAI,OAAO;CACX,IAAI,KAAK,MAAM;AACf,KAAI,OAAO,OAAO,OAAO,KAAK;AAC5B,MAAI,OAAO,IAAK,QAAO;AACvB,UAAQ,MAAM,MAAM,EAAE;AACtB,OAAK,MAAM;;AAEb,KAAI,UAAU,IAAK,QAAO;AAC1B,KAAI,OAAO,KAAK;AACd,MAAI,MAAM,OAAO,IAAK,QAAO,OAAO,SAAS,MAAM,MAAM,EAAE,EAAE,EAAE;AAC/D,MAAI,MAAM,OAAO,IAAK,QAAO,OAAO,SAAS,OAAO,GAAG;AACvD,SAAO,OAAO,SAAS,OAAO,EAAE;;AAElC,QAAO,OAAO,SAAS,OAAO,GAAG;;AAEnC,SAAS,UAAU,QAAQ;AACzB,KAAI,kBAAkB,OAAQ,UAAS,OAAO,SAAS;AACvD,QAAO,OAAO,WAAW,YAAY,SAAS,MAAM,KAAK,CAAC,eAAe,OAAO;;AAElF,MAAa,MAAM;CACjB,KAAK;CACL,WAAW;CACX,cAAc;CACd,MAAM;CACN,WAAW;CACX,WAAW;EAET,OAAQ,QAAQ;GACd,MAAM,QAAQ,kBAAkB,SAAS,OAAO,SAAS,GAAG;AAC5D,UAAO,SAAS,IAAI,KAAK,MAAM,SAAS,EAAE,KAAK,MAAM,MAAM,SAAS,EAAE,CAAC,MAAM,EAAE;;EAGjF,MAAO,QAAQ;GACb,MAAM,QAAQ,kBAAkB,SAAS,OAAO,SAAS,GAAG;AAC5D,UAAO,SAAS,IAAI,IAAI,MAAM,SAAS,EAAE,KAAK,KAAK,MAAM,SAAS,EAAE,C
AAC,MAAM,EAAE;;EAG/E,QAAS,QAAQ;AAEf,WADc,kBAAkB,SAAS,OAAO,SAAS,GAAG,QAC/C,SAAS,GAAG;;EAG3B,YAAa,QAAQ;GACnB,MAAM,QAAQ,kBAAkB,SAAS,OAAO,SAAS,GAAG;AAC5D,UAAO,SAAS,IAAI,KAAK,MAAM,SAAS,GAAG,CAAC,aAAa,KAAK,MAAM,MAAM,SAAS,GAAG,CAAC,aAAa,CAAC,MAAM,EAAE;;EAEhH;CACD,SAAS;CACV;;;ACnID,MAAaA,QAAM;CACjB,KAAK;CACL,UAAW;AACT,SAAO;;CAET,UAAW,MAAM;AACf,SAAO,SAAS,OAAO,OAAO,EAAE;;CAElC,MAAM;CACP;;;ACTD,MAAaC,UAAQ;CACnB,KAAK;CACL,MAAM;CACN,UAAU,SAAO,SAAS,QAAQ,SAAS;CAC3C,YAAY,SAAO;CACpB;;;ACLD,MAAa,MAAM;CACjB,KAAK;CACL,MAAM;CACN,cAAc;CACd,YAAY,WAAS,WAAW;CAChC,iBAAe;CACf,UAAU,SAAO;AACf,SAAO,SAAS,OAAO,SAAS,UAAU,SAAS,UAAU,SAAS;;CAExE,WAAW;EACT,iBAAe;EACf,iBAAe;EACf,iBAAe;EAChB;CACF;;;ACbD,SAAS,gBAAgB,MAAM;CAC7B,MAAM,6BAAa,IAAI,KAAK;AAC5B,MAAK,MAAM,UAAU,MAAK;AACxB,MAAI,CAAC,cAAc,OAAO,CAAE,QAAO;EACnC,MAAM,OAAO,OAAO,KAAK,OAAO;AAChC,MAAI,KAAK,WAAW,EAAG,QAAO;AAC9B,OAAK,MAAM,OAAO,MAAK;AACrB,OAAI,WAAW,IAAI,IAAI,CAAE,QAAO;AAChC,cAAW,IAAI,IAAI;;;AAGvB,QAAO;;AAET,MAAa,OAAO;CAClB,KAAK;CACL,MAAM;CACN,SAAS;CACT,UAAW,MAAM;AACf,SAAO;;CAEV;;;ACpBD,SAAS,iBAAiB,MAAM;AAC9B,KAAI,SAAS,KAAM,QAAO;AAC1B,QAAO,KAAK,OAAO,OAAK,cAAc,GAAG,IAAI,OAAO,KAAK,GAAG,CAAC,WAAW,EAAE;;AAE5E,MAAa,QAAQ;CACnB,KAAK;CACL,UAAW,MAAM;AAEf,SAAO,MAAM,QAAQ,OAAO,QAAQ,IAAI,EAAE;;CAE5C,MAAM;CACN,SAAS;CACV;;;ACbD,MAAM,SAAS;AACf,MAAa,SAAS;CACpB,KAAK;CACL,MAAM;CACN,QAAS,MAAM;AACb,MAAI,SAAS,QAAQ,CAAC,KAAK,OAAQ,QAAO;AAC1C,MAAI,KAAK,OAAO,EAAE,KAAK,KAAK;GAE1B,MAAM,SAAS,KAAK,MAAM,OAAO,EAAE;AACnC,OAAI,CAAC,OAAQ,QAAO;GAEpB,MAAM,YAAY,OAAO,aAAa;AACtC,OAAI,IAAI,IAAI,UAAU,CAAC,OAAO,UAAU,OAAQ,QAAO;;AAEzD,SAAO;;CAET,UAAW,MAAM;EACf,MAAM,EAAE,SAAS,MAAM,YAAY,OAAO,KAAK,MAAM,OAAO,EAAE,UAAU,EAAE;AAC1E,SAAO,IAAI,OAAO,QAAQ,UAAU;;CAEtC,YAAY,WAAS,kBAAkB;CACvC,YAAY,WAAS,OAAO,UAAU;CACvC;;;ACtBD,MAAa,MAAM;CACjB,KAAK;CACL,MAAM;CACN,eAAa;CACb,YAAY,SAAO,SAAS,OAAO,OAAO,EAAE;CAC7C;;;ACLD,MAAa,MAAM;CACjB,KAAK;CACL,MAAM;CACN,YAAY,SAAO,SAAS,OAAO,OAAO,EAAE;CAC5C,UAAU,SAAO;AACf,MAAI,SAAS,KAAM,QAAO;AAC1B,SAAO,OAAO,OAAO,KAAK,CAAC,OAAO,OAAK,OAAO,KAAK;;CAEtD;;;ACTD,MAAa,MAAM;CACjB,KAAK
;CACL,MAAM;CACN,eAAa;CACb,YAAY,SAAO,SAAS,OAAO,OAAO;CAC3C;;;ACJD,MAAM,mCAAmB,IAAI,OAAO,qDAEnB;AACjB,MAAM,wCAAwB,IAAI,OAAO,mLAShB;AACzB,SAAS,qBAAqB,MAAM;AAClC,KAAI,SAAS,KAAM,QAAO;AAC1B,KAAI,iBAAiB,KAAK,KAAK,KAAK,KAAM,QAAO;AACjD,KAAI,sBAAsB,KAAK,KAAK,KAAK,KAAM,QAAO;AACtD,QAAO;;AAET,SAAS,uBAAuB,MAAM;CACpC,IAAI,QAAQ,iBAAiB,KAAK,KAAK;AACvC,KAAI,UAAU,KAAM,SAAQ,sBAAsB,KAAK,KAAK;AAC5D,KAAI,UAAU,KACZ,OAAM,IAAI,MAAM,sDAAsD;CAGxE,MAAM,OAAO,CAAC,MAAM;CACpB,MAAM,QAAQ,CAAC,MAAM,KAAK;CAC1B,MAAM,MAAM,CAAC,MAAM;AACnB,KAAI,CAAC,MAAM,GAET,QAAO,IAAI,KAAK,KAAK,IAAI,MAAM,OAAO,IAAI,CAAC;CAG7C,MAAM,OAAO,CAAC,MAAM;CACpB,MAAM,SAAS,CAAC,MAAM;CACtB,MAAM,SAAS,CAAC,MAAM;CACtB,IAAI,WAAW;AACf,KAAI,MAAM,IAAI;EACZ,IAAI,eAAe,MAAM,GAAG,MAAM,GAAG,EAAE;AACvC,SAAM,aAAa,SAAS,EAE1B,iBAAgB;AAElB,aAAW,CAAC;;CAGd,IAAI,QAAQ;AACZ,KAAI,MAAM,MAAM,MAAM,KAAK;EACzB,MAAM,SAAS,CAAC,MAAM;EACtB,MAAM,WAAW,EAAE,MAAM,OAAO;AAChC,WAAS,SAAS,KAAK,YAAY;AACnC,MAAI,MAAM,OAAO,IAAK,SAAQ,CAAC;;CAEjC,MAAM,OAAO,IAAI,KAAK,KAAK,IAAI,MAAM,OAAO,KAAK,MAAM,QAAQ,QAAQ,SAAS,CAAC;AACjF,KAAI,MAAO,MAAK,QAAQ,KAAK,SAAS,GAAG,MAAM;AAC/C,QAAO;;AAET,SAAS,uBAAuB,MAAM;AACpC,QAAO,KAAK,aAAa;;AAE3B,MAAa,YAAY;CACvB,KAAK;CACL,WAAW;CACX,UAAW,QAAQ;AACjB,SAAO,kBAAkB;;CAE3B,MAAM;CACN,WAAW;CACX,SAAS;CACV;;;ACtED,MAAa,gBAAgB;CAC3B,KAAK;CACL,MAAM;CACN,UAAW;AACT,SAAO;;CAET,YAAa;CAGb,UAAW,QAAQ;AACjB,SAAO,OAAO,WAAW;;CAE3B,YAAa;AACX,SAAO;;CAEV;;;ACCD,SAAS,cAAc,eAAe,eAAe;CACnD,MAAM,SAAS;EACb,0BAAU,IAAI,KAAK;EACnB,yBAAS,IAAI,KAAK;EAClB,wBAAQ,IAAI,KAAK;EACjB,0BAAU,IAAI,KAAK;EACpB;CACD,MAAM,cAAc,OAAO;AAC3B,MAAK,MAAM,QAAQ,CACjB,GAAG,eACH,GAAG,cACJ,EAAC;AACY,SAAO,KAAK,MACpB,IAAI,KAAK,KAAK,KAAK;AACvB,cAAY,IAAI,KAAK,KAAK,KAAK;;AAEjC,QAAO;;AAET,SAAS,aAAa,EAAE,gBAAgB,EAAE,EAAE,gBAAgB,EAAE,EAAE,WAAW;AACzE,KAAI,SAAS;AACX,gBAAc,KAAK,GAAG,QAAQ,cAAc;AAC5C,gBAAc,KAAK,GAAG,QAAQ,cAAc;;AAG9C,QAAO;EACL;EACA;EACA,SAJc,cAAc,eAAe,cAAc;EAK1D;;;;;;GAMC,MAAM,kBAAkB,aAAa,EACvC,eAAe;CACb;CACA;CACAC;CACD,EACF,CAAC;;;;;GAKE,MAAM,cAAc,aAAa;CACnC,eAAe;EACb;EACA;EACA;EACA;EACD;CACD,SAAS;CACV,CAAC;;;;;GA
KE,MAAM,cAAc,aAAa,EACnC,SAAS,aACV,CAAC;;;GAGE,MAAa,iBAAiB,aAAa;CAC7C,eAAe;EACb;EACA;EACA;EACA;EACD;CACD,eAAe,CACb,WACAC,QACD;CACD,SAAS;CACV,CAAC;;;;;;;;;;;;;;;;;;;;GAoBE,MAAM,kBAAkB,aAAa;CACvC,eAAe,CACb,QACA,cACD;CACD,SAAS;CACV,CAAC;AACF,MAAa,aAAa,IAAI,IAAI;CAChC,CACE,QACA,YACD;CACD,CACE,WACA,eACD;CACD,CACE,YACA,gBACD;CACD,CACE,QACA,YACD;CACD,CACE,YACA,gBACD;CACF,CAAC;;;ACxIF,MAAM,kBAAkB;AACxB,MAAM,mBAAmB;AACzB,MAAM,mBAAmB;AACzB,MAAM,oBAAoB;AAC1B,MAAM,gBAAgB;AACtB,MAAM,iBAAiB;AACvB,MAAM,gBAAgB;AACtB,MAAM,wBACN;AACA,MAAM,gCAAgC;AACtC,MAAM,0BAA0B;AAChC,MAAM,qBAAqB;AAC3B,MAAM,kBAAkB;AACxB,MAAM,sBAAsB,IAAI,IAAI;CAClC,CACE,KACA,EACD;CACD,CACE,KACA,EACD;CACD,CACE,IACA,EACD;CACF,CAAC;AACF,MAAM,0BAA0B,IAAI,IAAI;CACtC,CACE,IACA,KACD;CACD,CACE,IACA,OACD;CACD,CACE,IACA,KACD;CACD,CACE,KACA,IACD;CACD,CACE,GACA,IACD;CACD,CACE,KACA,KACD;CACD,CACE,KACA,KACD;CACD,CACE,KACA,KACD;CACD,CACE,KACA,KACD;CACD,CACE,KACA,OACD;CACD,CACE,IACA,IACD;CACD,CACE,IACA,KACD;CACD,CACE,IACA,IACD;CACD,CACE,IACA,KACD;CACD,CACE,IACA,IACD;CACD,CACE,IACA,OACD;CACD,CACE,IACA,SACD;CACD,CACE,IACA,SACD;CACF,CAAC;;;GAGE,SAAS,oBAAoB,UAAU;AAEzC,KAAI,MAAQ,YAAY,YAAY,GAAM,QAAO,WAAW;CAE5D,MAAM,KAAK,WAAW;AAEtB,KAAI,MAAQ,MAAM,MAAM,IAAM,QAAO,KAAK,KAAO;AACjD,QAAO;;;;GAIL,SAAS,wBAAwB,UAAU;AAE7C,KAAI,MAAQ,YAAY,YAAY,GAAM,QAAO,WAAW;AAC5D,QAAO;;;;GAIL,SAAS,gBAAgB,WAAW;AAEtC,KAAI,aAAa,MAAQ,QAAO,OAAO,aAAa,UAAU;AAG9D,QAAO,OAAO,cAAc,YAAY,SAAY,MAAM,QAAS,YAAY,QAAW,QAAU,MAAO;;AAE7G,MAAM,SAAS;AACf,MAAM,aAAa;AACnB,MAAM,aAAa;AACnB,SAAS,WAAW,QAAQ,UAAU;AACpC,KAAI,CAAC,OAAQ,QAAO;CACpB,IAAI,QAAQ;CACZ,IAAI,MAAM;CACV,IAAI,OAAO;CACX,IAAI,OAAO;AACX,QAAM,QAAQ,KAAK,CAAC,WAAW,SAAS,OAAO,OAAO,QAAQ,EAAE,CAAC,EAAC;AAChE;AACA,MAAI,WAAW,QAAQ,aAAa,IAAI,GAAG;AACzC,UAAO;AACP,YAAS;AACT;;;AAGJ,QAAM,MAAM,OAAO,UAAU,CAAC,WAAW,SAAS,OAAO,OAAO,IAAI,CAAC,EAAC;AACpE;AACA,MAAI,MAAM,WAAW,aAAa,IAAI,GAAG;AACvC,UAAO;AACP,UAAO;AACP;;;CAGJ,MAAM,UAAU,OAAO,MAAM,OAAO,IAAI;CACxC,MAAM,SAAS,IAAI,OAAO,OAAO;CACjC,MAAM,cAAc,IAAI,OAAO,SAAS,WAAW,QAAQ,KAAK,OAAO;AACvE,QAAO,GAAG,SAAS,OA
AO,UAAU,KAAK,IAAI,YAAY;;AAE3D,SAAS,aAAa,QAAQ,UAAU,MAAM,QAAQ;CACpD,IAAI,QAAQ,WAAW,OAAO,EAAE,WAAW,SAAS;CACpD,MAAM,UAAU,WAAW,QAAQ,SAAS;AAC5C,KAAI,QAAS,UAAS,MAAM;AAC5B,QAAO;;AAET,SAAS,gBAAgB,YAAY,cAAc;AACjD,KAAI,aAAa,aAAc,QAAO;AACtC,KAAI,aAAa,aAAc,QAAO;AACtC,QAAO;;AAET,SAAS,iBAAiB,OAAO;AAC/B,KAAI,UAAU,EAAG,QAAO;AACxB,KAAI,QAAQ,EAAG,QAAO,KAAK,OAAO,QAAQ,EAAE;AAC5C,QAAO;;AAET,IAAM,UAAN,MAAc;CACZ;CACA;CACA,WAAW;CACX,YAAY,QAAO;AAEjB,YAAU;AACV,OAAK,SAAS;AACd,QAAA,SAAe,OAAO;;CAExB,KAAK,SAAS,GAAG;AACf,SAAO,KAAK,OAAO,WAAW,KAAK,WAAW,OAAO;;CAEvD,OAAO;AACL,OAAK,YAAY;;CAEnB,MAAM;AACJ,SAAO,KAAK,YAAY,MAAA,SAAe;;;AAG3C,IAAa,cAAb,MAAyB;CACvB;CACA,aAAa;CACb,YAAY;CACZ,OAAO;CACP;CACA;CACA;CACA;CACA,kBAAkB;CAClB,yBAAS,IAAI,KAAK;CAClB,4BAAY,IAAI,KAAK;CACrB,YAAY,OAAO,EAAE,SAAS,gBAAgB,WAAW,qBAAqB,SAAQ;AACpF,QAAA,UAAgB,IAAI,QAAQ,MAAM;AAClC,OAAK,YAAY;AACjB,OAAK,qBAAqB;AAC1B,OAAK,gBAAgB,OAAO;AAC5B,OAAK,UAAU,OAAO;AACtB,OAAK,YAAY;;CAEnB,kBAAkB;EAChB,IAAI,KAAK,MAAA,QAAc,MAAM;AAC7B,SAAM,aAAa,GAAG,EAAC;AACrB,SAAA,QAAc,MAAM;AACpB,QAAK,MAAA,QAAc,MAAM;;;CAG7B,cAAc;EACZ,IAAI,KAAK,MAAA,QAAc,MAAM;AAC7B,MAAI,OAAA,GAAc;AAClB,QAAA,QAAc,MAAM;AACpB,OAAK,MAAA,QAAc,MAAM;AACzB,SAAM,OAAO,KAAK,CAAC,MAAM,GAAG,EAAC;AAC3B,SAAA,QAAc,MAAM;AACpB,QAAK,MAAA,QAAc,MAAM;;;CAG7B,aAAa;EACX,IAAI,KAAK,MAAA,QAAc,MAAM;AAC7B,SAAM,OAAA,IAAa;AACjB,QAAK,cAAc;AACnB,SAAA,QAAc,MAAM;AACpB,QAAK,MAAA,QAAc,MAAM;;;CAG7B,aAAa,SAAS;EACpB,MAAM,OAAO,aAAa,MAAA,QAAc,QAAQ,MAAA,QAAc,UAAU,KAAK,MAAM,MAAA,QAAc,WAAW,KAAK,UAAU;AAC3H,yBAAO,IAAI,YAAY,GAAG,QAAQ,GAAG,OAAO;;CAE9C,gBAAgB,SAAS;EACvB,MAAM,QAAQ,MAAA,YAAkB,QAAQ;AACxC,OAAK,YAAY,MAAM;;CAEzB,qBAAqB,MAAM;AACzB,MAAI,KAAK,WAAW,EAClB,OAAM,MAAA,YAAkB,4EAA4E;EAEtG,MAAM,QAAQ,uBAAuB,KAAK,KAAK,GAAG;AAClD,MAAI,UAAU,KACZ,OAAM,MAAA,YAAkB,oDAAoD;EAE9E,MAAM,QAAQ,SAAS,MAAM,IAAI,GAAG;EACpC,MAAM,QAAQ,SAAS,MAAM,IAAI,GAAG;AACpC,MAAI,UAAU,EACZ,OAAM,MAAA,YAAkB,0DAA0D;AAEpF,OAAK,kBAAkB,QAAQ;AAC/B,MAAI,UAAU,KAAK,UAAU,EAC3B,MAAK,gBAAgB,yDAAyD;AAEhF,SAAO,KAAK,MAAM;;CAEpB,oBAAoB,MAAM;AACxB,MAAI,KAAK,WAAW,EAClB,OAAM,MAAA,YAAkB,kFA
AkF,KAAK,SAAS;EAE1H,MAAM,SAAS,KAAK;EACpB,MAAM,SAAS,KAAK;AACpB,MAAI,CAAC,mBAAmB,KAAK,OAAO,CAClC,OAAM,MAAA,YAAkB,uEAAuE,OAAO,GAAG;AAE3G,MAAI,KAAK,OAAO,IAAI,OAAO,CACzB,OAAM,MAAA,YAAkB,gEAAgE,OAAO,cAAc;AAE/G,MAAI,CAAC,gBAAgB,KAAK,OAAO,CAC/B,OAAM,MAAA,YAAkB,4FAA4F;AAEtH,OAAK,OAAO,IAAI,QAAQ,OAAO;;CAEjC,eAAe,OAAO,KAAK,WAAW;AACpC,MAAI,QAAQ,KAAK;GACf,MAAM,SAAS,MAAA,QAAc,OAAO,MAAM,OAAO,IAAI;AACrD,OAAI,UACF,MAAI,IAAI,WAAW,GAAG,WAAW,OAAO,QAAQ,YAAW;IACzD,MAAM,YAAY,OAAO,WAAW,SAAS;AAC7C,QAAI,EAAE,cAAc,KAAQ,MAAQ,aAAa,aAAa,SAC5D,OAAM,MAAA,YAAkB,4CAA4C,UAAU,GAAG;;YAG5E,sBAAsB,KAAK,OAAO,CAC3C,OAAM,MAAA,YAAkB,2CAA2C;AAErE,UAAO;;;CAGX,kBAAkB,KAAK,QAAQ,YAAY;EACzC,IAAI,WAAW;EACf,MAAM,SAAS,EAAE;AACjB,MAAI,WAAW,KAAM,MAAK,UAAU,IAAI,QAAQ,OAAO;EACvD,IAAI,KAAK,MAAA,QAAc,MAAM;AAC7B,SAAM,OAAO,GAAE;AACb,OAAI,OAAA,GACF;AAGF,OAAI,CAAC,kBADa,MAAA,QAAc,KAAK,EAAE,CACN,CAC/B;AAEF,cAAW;AACX,SAAA,QAAc,MAAM;AACpB,OAAI,KAAK,oBAAoB,MAAM,GAAG;QAChC,KAAK,cAAc,YAAY;AACjC,YAAO,KAAK,KAAK;AACjB,UAAK,MAAA,QAAc,MAAM;AACzB;;;GAGJ,MAAM,OAAO,KAAK;GAClB,MAAM,WAAW,KAAK,YAAY;IAChC,cAAc;IACd,aAAa;IACb,aAAa;IACb,cAAc;IACf,CAAC;AACF,OAAI,SAAU,QAAO,KAAK,SAAS,OAAO;AAC1C,QAAK,oBAAoB,MAAM,GAAG;AAClC,QAAK,MAAA,QAAc,MAAM;AACzB,QAAK,KAAK,SAAS,QAAQ,KAAK,aAAa,eAAe,OAAO,EACjE,OAAM,MAAA,YAAkB,kEAAkE;YACjF,KAAK,aAAa,WAC3B;;AAGJ,MAAI,SAAU,QAAO;GACnB;GACA;GACA,MAAM;GACN;GACD;;CAEH,cAAc,aAAa,QAAQ,iBAAiB;AAClD,MAAI,CAAC,SAAS,OAAO,CACnB,OAAM,MAAA,YAAkB,oEAAoE;AAE9F,OAAK,MAAM,CAAC,KAAK,UAAU,OAAO,QAAQ,OAAO,EAAC;AAChD,OAAI,OAAO,OAAO,aAAa,IAAI,CAAE;AACrC,UAAO,eAAe,aAAa,KAAK;IACtC;IACA,UAAU;IACV,YAAY;IACZ,cAAc;IACf,CAAC;AACF,mBAAgB,IAAI,IAAI;;;CAG5B,iBAAiB,QAAQ,iBAAiB,QAAQ,SAAS,WAAW,WAAW,UAAU;AAIzF,MAAI,MAAM,QAAQ,QAAQ,EAAE;AAC1B,aAAU,MAAM,UAAU,MAAM,KAAK,QAAQ;AAC7C,QAAI,IAAI,QAAQ,GAAG,QAAQ,QAAQ,QAAQ,SAAQ;AACjD,QAAI,MAAM,QAAQ,QAAQ,OAAO,CAC/B,OAAM,MAAA,YAAkB,yEAAyE;AAEnG,QAAI,OAAO,YAAY,YAAY,cAAc,QAAQ,OAAO,CAC9D,SAAQ,SAAS;;;AAOvB,MAAI,OAAO,YAAY,YAAY,cAAc,QAAQ,CACvD,WAAU;AAEZ,YAAU,OAAO,QAAQ;AACzB,MAAI,WAAW,0BACb,KAAI,MAAM,QAAQ,UAAU,CAC1B,MAAI,IAAI,QAA
Q,GAAG,QAAQ,UAAU,QAAQ,QAC3C,MAAK,cAAc,QAAQ,UAAU,QAAQ,gBAAgB;MAG/D,MAAK,cAAc,QAAQ,WAAW,gBAAgB;OAEnD;AACL,OAAI,CAAC,KAAK,sBAAsB,CAAC,gBAAgB,IAAI,QAAQ,IAAI,OAAO,OAAO,QAAQ,QAAQ,EAAE;AAC/F,SAAK,OAAO,aAAa,KAAK;AAC9B,UAAA,QAAc,WAAW,YAAY,MAAA,QAAc;AACnD,UAAM,MAAA,YAAkB,4CAA4C;;AAEtE,UAAO,eAAe,QAAQ,SAAS;IACrC,OAAO;IACP,UAAU;IACV,YAAY;IACZ,cAAc;IACf,CAAC;AACF,mBAAgB,OAAO,QAAQ;;AAEjC,SAAO;;CAET,gBAAgB;EACd,MAAM,KAAK,MAAA,QAAc,MAAM;AAC/B,MAAI,OAAA,GACF,OAAA,QAAc,MAAM;WACX,OAAA,IAAwB;AACjC,SAAA,QAAc,MAAM;AACpB,OAAI,MAAA,QAAc,MAAM,KAAA,GACtB,OAAA,QAAc,MAAM;QAGtB,OAAM,MAAA,YAAkB,yCAAyC;AAEnE,OAAK,QAAQ;AACb,OAAK,YAAY,MAAA,QAAc;;CAEjC,oBAAoB,eAAe,aAAa;EAC9C,IAAI,aAAa;EACjB,IAAI,KAAK,MAAA,QAAc,MAAM;AAC7B,SAAM,OAAO,GAAE;AACb,QAAK,iBAAiB;AACtB,QAAK,MAAA,QAAc,MAAM;AACzB,OAAI,eAAe;AACjB,SAAK,aAAa;AAClB,SAAK,MAAA,QAAc,MAAM;;AAE3B,OAAI,MAAM,GAAG,EAAE;AACb,SAAK,eAAe;AACpB,SAAK,MAAA,QAAc,MAAM;AACzB;AACA,SAAK,aAAa;AAClB,SAAK,YAAY;AACjB,SAAK,MAAA,QAAc,MAAM;SAEzB;;AAGJ,MAAI,gBAAgB,MAAM,eAAe,KAAK,KAAK,aAAa,YAC9D,MAAK,gBAAgB,wBAAwB;AAE/C,SAAO;;CAET,wBAAwB;EACtB,IAAI,KAAK,MAAA,QAAc,MAAM;AAG7B,OAAK,OAAA,MAAgB,OAAA,OAAe,OAAO,MAAA,QAAc,KAAK,EAAE,IAAI,OAAO,MAAA,QAAc,KAAK,EAAE,EAAE;AAChG,QAAK,MAAA,QAAc,KAAK,EAAE;AAC1B,OAAI,OAAO,KAAK,kBAAkB,GAAG,CACnC,QAAO;;AAGX,SAAO;;CAET,gBAAgB,KAAK,QAAQ,YAAY,sBAAsB;EAC7D,IAAI,KAAK,MAAA,QAAc,MAAM;AAC7B,MAAI,kBAAkB,GAAG,IAAI,gBAAgB,GAAG,IAAI,OAAA,MAAgB,OAAA,MAAoB,OAAA,MAAmB,OAAA,MAAsB,OAAA,OAAwB,OAAA,MAAuB,OAAA,MAAuB,OAAA,MAAuB,OAAA,MAAkB,OAAA,MAAwB,OAAA,GACtQ;EAEF,IAAI;AACJ,MAAI,OAAA,MAAmB,OAAA,IAAc;AACnC,eAAY,MAAA,QAAc,KAAK,EAAE;AACjC,OAAI,kBAAkB,UAAU,IAAI,wBAAwB,gBAAgB,UAAU,CACpF;;EAGJ,IAAI,SAAS;EACb,IAAI,aAAa,MAAA,QAAc;EAC/B,IAAI,eAAe,MAAA,QAAc;EACjC,IAAI,oBAAoB;EACxB,IAAI,OAAO;AACX,SAAM,OAAO,GAAE;AACb,OAAI,OAAA,IAAc;AAChB,gBAAY,MAAA,QAAc,KAAK,EAAE;AACjC,QAAI,kBAAkB,UAAU,IAAI,wBAAwB,gBAAgB,UAAU,CACpF;cAEO,OAAA;QAEL,kBADc,MAAA,QAAc,KAAK,GAAG,CACR,CAC9B;cAEO,MAAA,QAAc,aAAa,KAAK,aAAa,KAAK,uBAAuB,IAAI,wBAAwB,gBAAgB,GAAG,CACjI;YACS,MAAM,GAAG,EAAE;AACpB,WAAO,KAAK;IACZ,MA
AM,YAAY,KAAK;IACvB,MAAM,aAAa,KAAK;AACxB,SAAK,oBAAoB,OAAO,GAAG;AACnC,QAAI,KAAK,cAAc,YAAY;AACjC,yBAAoB;AACpB,UAAK,MAAA,QAAc,MAAM;AACzB;WACK;AACL,WAAA,QAAc,WAAW;AACzB,UAAK,OAAO;AACZ,UAAK,YAAY;AACjB,UAAK,aAAa;AAClB;;;AAGJ,OAAI,mBAAmB;IACrB,MAAM,UAAU,KAAK,eAAe,cAAc,YAAY,MAAM;AACpE,QAAI,QAAS,WAAU;AACvB,cAAU,iBAAiB,KAAK,OAAO,KAAK;AAC5C,mBAAe,aAAa,MAAA,QAAc;AAC1C,wBAAoB;;AAEtB,OAAI,CAAC,aAAa,GAAG,CACnB,cAAa,MAAA,QAAc,WAAW;AAExC,SAAA,QAAc,MAAM;AACpB,QAAK,MAAA,QAAc,MAAM;;EAE3B,MAAM,UAAU,KAAK,eAAe,cAAc,YAAY,MAAM;AACpE,MAAI,QAAS,WAAU;AACvB,MAAI,WAAW,KAAM,MAAK,UAAU,IAAI,QAAQ,OAAO;AACvD,MAAI,OAAQ,QAAO;GACjB;GACA;GACA,MAAM;GACN;GACD;;CAEH,uBAAuB,KAAK,QAAQ,YAAY;EAC9C,IAAI,KAAK,MAAA,QAAc,MAAM;AAC7B,MAAI,OAAA,GAAqB;EACzB,IAAI,SAAS;AACb,QAAA,QAAc,MAAM;EACpB,IAAI,eAAe,MAAA,QAAc;EACjC,IAAI,aAAa,MAAA,QAAc;AAC/B,OAAK,MAAA,QAAc,MAAM;AACzB,SAAM,OAAO,GAAE;AACb,OAAI,OAAA,IAAqB;IACvB,MAAM,UAAU,KAAK,eAAe,cAAc,MAAA,QAAc,UAAU,KAAK;AAC/E,QAAI,QAAS,WAAU;AACvB,UAAA,QAAc,MAAM;AACpB,SAAK,MAAA,QAAc,MAAM;AACzB,QAAI,OAAA,IAAqB;AACvB,oBAAe,MAAA,QAAc;AAC7B,WAAA,QAAc,MAAM;AACpB,kBAAa,MAAA,QAAc;WACtB;AACL,SAAI,WAAW,KAAM,MAAK,UAAU,IAAI,QAAQ,OAAO;AACvD,YAAO;MACL;MACA;MACA,MAAM;MACN;MACD;;cAEM,MAAM,GAAG,EAAE;IACpB,MAAM,UAAU,KAAK,eAAe,cAAc,YAAY,KAAK;AACnE,QAAI,QAAS,WAAU;AACvB,cAAU,iBAAiB,KAAK,oBAAoB,OAAO,WAAW,CAAC;AACvE,mBAAe,aAAa,MAAA,QAAc;cACjC,MAAA,QAAc,aAAa,KAAK,aAAa,KAAK,uBAAuB,CAClF,OAAM,MAAA,YAAkB,+DAA+D;QAClF;AACL,UAAA,QAAc,MAAM;AACpB,iBAAa,MAAA,QAAc;;AAE7B,QAAK,MAAA,QAAc,MAAM;;AAE3B,QAAM,MAAA,YAAkB,6DAA6D;;CAEvF,uBAAuB,KAAK,QAAQ,YAAY;EAC9C,IAAI,KAAK,MAAA,QAAc,MAAM;AAC7B,MAAI,OAAA,GAAqB;EACzB,IAAI,SAAS;AACb,QAAA,QAAc,MAAM;EACpB,IAAI,aAAa,MAAA,QAAc;EAC/B,IAAI,eAAe,MAAA,QAAc;EACjC,IAAI;AACJ,OAAK,MAAA,QAAc,MAAM;AACzB,SAAM,OAAO,GAAE;AACb,OAAI,OAAA,IAAqB;IACvB,MAAM,UAAU,KAAK,eAAe,cAAc,MAAA,QAAc,UAAU,KAAK;AAC/E,QAAI,QAAS,WAAU;AACvB,UAAA,QAAc,MAAM;AACpB,QAAI,WAAW,KAAM,MAAK,UAAU,IAAI,QAAQ,OAAO;AACvD,WAAO;KACL;KACA;KACA,MAAM;KACN;KACD;;AAEH,OAAI,OAAA,IAAkB;IACpB,MAAM,UAAU,KAAK,eAAe,cAAc,MAAA,QAAc,UAAU,KAAK;AAC/E,
QAAI,QAAS,WAAU;AACvB,UAAA,QAAc,MAAM;AACpB,SAAK,MAAA,QAAc,MAAM;AACzB,QAAI,MAAM,GAAG,CACX,MAAK,oBAAoB,OAAO,WAAW;aAClC,KAAK,OAAO,wBAAwB,IAAI,GAAG,EAAE;AACtD,eAAU,wBAAwB,IAAI,GAAG;AACzC,WAAA,QAAc,MAAM;gBACV,MAAM,oBAAoB,IAAI,GAAG,IAAI,KAAK,GAAG;KACvD,IAAI,YAAY;KAChB,IAAI,YAAY;AAChB,YAAM,YAAY,GAAG,aAAY;AAC/B,YAAA,QAAc,MAAM;AACpB,WAAK,MAAA,QAAc,MAAM;AACzB,WAAK,MAAM,oBAAoB,GAAG,KAAK,EACrC,cAAa,aAAa,KAAK;UAE/B,OAAM,MAAA,YAAkB,mEAAmE;;AAG/F,eAAU,gBAAgB,UAAU;AACpC,WAAA,QAAc,MAAM;UAEpB,OAAM,MAAA,YAAkB,4DAA4D;AAEtF,mBAAe,aAAa,MAAA,QAAc;cACjC,MAAM,GAAG,EAAE;IACpB,MAAM,UAAU,KAAK,eAAe,cAAc,YAAY,KAAK;AACnE,QAAI,QAAS,WAAU;AACvB,cAAU,iBAAiB,KAAK,oBAAoB,OAAO,WAAW,CAAC;AACvE,mBAAe,aAAa,MAAA,QAAc;cACjC,MAAA,QAAc,aAAa,KAAK,aAAa,KAAK,uBAAuB,CAClF,OAAM,MAAA,YAAkB,+DAA+D;QAClF;AACL,UAAA,QAAc,MAAM;AACpB,iBAAa,MAAA,QAAc;;AAE7B,QAAK,MAAA,QAAc,MAAM;;AAE3B,QAAM,MAAA,YAAkB,6DAA6D;;CAEvF,mBAAmB,KAAK,QAAQ,YAAY;EAC1C,IAAI,KAAK,MAAA,QAAc,MAAM;EAC7B,IAAI;EACJ,IAAI,YAAY;EAChB,IAAI,SAAS,EAAE;AACf,MAAI,OAAA,IAA4B;AAC9B,gBAAA;AACA,eAAY;AACZ,YAAS,EAAE;aACF,OAAA,IACT,cAAA;MAEA;AAEF,MAAI,WAAW,KAAM,MAAK,UAAU,IAAI,QAAQ,OAAO;AACvD,QAAA,QAAc,MAAM;AACpB,OAAK,MAAA,QAAc,MAAM;EACzB,IAAI,WAAW;EACf,IAAI,YAAY;EAChB,IAAI,UAAU;EACd,IAAI,SAAS;EACb,IAAI,iBAAiB;EACrB,IAAI,SAAS;EACb,IAAI,YAAY;EAChB,IAAI,OAAO;EACX,MAAM,kCAAkB,IAAI,KAAK;AACjC,SAAM,OAAO,GAAE;AACb,QAAK,oBAAoB,MAAM,WAAW;AAC1C,QAAK,MAAA,QAAc,MAAM;AACzB,OAAI,OAAO,YAAY;AACrB,UAAA,QAAc,MAAM;AAEpB,WAAO;KACL;KACA;KACA,MAJW,YAAY,YAAY;KAKnC;KACD;;AAEH,OAAI,CAAC,SACH,OAAM,MAAA,YAAkB,6EAA6E;AAEvG,YAAS,UAAU,YAAY;AAC/B,YAAS,iBAAiB;AAC1B,OAAI,OAAA,IAAiB;AACnB,gBAAY,MAAA,QAAc,KAAK,EAAE;AACjC,QAAI,kBAAkB,UAAU,EAAE;AAChC,cAAS,iBAAiB;AAC1B,WAAA,QAAc,MAAM;AACpB,UAAK,oBAAoB,MAAM,WAAW;;;AAG9C,UAAO,KAAK;GACZ,MAAM,WAAW,KAAK,YAAY;IAChC,cAAc;IACd,aAAa;IACb,aAAa;IACb,cAAc;IACf,CAAC;AACF,OAAI,UAAU;AACZ,aAAS,SAAS,OAAO;AACzB,cAAU,SAAS;;AAErB,QAAK,oBAAoB,MAAM,WAAW;AAC1C,QAAK,MAAA,QAAc,MAAM;AACzB,QAAK,kBAAkB,KAAK,SAAS,SAAS,OAAA,IAAc;AAC1D,aAAS;AACT,UAAA,QAAc,MAAM;AACpB,SAAK,MAAA,QAAc,MAAM;AACz
B,SAAK,oBAAoB,MAAM,WAAW;IAC1C,MAAM,WAAW,KAAK,YAAY;KAChC,cAAc;KACd,aAAa;KACb,aAAa;KACb,cAAc;KACf,CAAC;AACF,QAAI,SAAU,aAAY,SAAS;;AAErC,OAAI,UACF,MAAK,iBAAiB,QAAQ,iBAAiB,QAAQ,SAAS,UAAU;YACjE,OACT,QAAO,KAAK,KAAK,iBAAiB,EAAE,EAAE,iBAAiB,QAAQ,SAAS,UAAU,CAAC;OAEnF,QAAO,KAAK,QAAQ;AAEtB,QAAK,oBAAoB,MAAM,WAAW;AAC1C,QAAK,MAAA,QAAc,MAAM;AACzB,OAAI,OAAA,IAAc;AAChB,eAAW;AACX,UAAA,QAAc,MAAM;AACpB,SAAK,MAAA,QAAc,MAAM;SAEzB,YAAW;;AAGf,QAAM,MAAA,YAAkB,qFAAqF;;CAI/G,gBAAgB,KAAK,QAAQ,YAAY;EACvC,IAAI,WAAW;EACf,IAAI,iBAAiB;EACrB,IAAI,iBAAiB;EACrB,IAAI,aAAa;EACjB,IAAI,aAAa;EACjB,IAAI,iBAAiB;EACrB,IAAI,KAAK,MAAA,QAAc,MAAM;EAC7B,IAAI,UAAU;AACd,MAAI,OAAA,IACF,WAAU;WACD,OAAA,GACT,WAAU;MAEV;EAEF,IAAI,SAAS;EACb,IAAI,MAAM;AACV,SAAM,OAAO,GAAE;AACb,SAAA,QAAc,MAAM;AACpB,QAAK,MAAA,QAAc,MAAM;AACzB,OAAI,OAAA,MAAe,OAAA,GACjB,KAAI,kBAAkB,SACpB,YAAW,OAAA,KAAc,gBAAgB;OAEzC,OAAM,MAAA,YAAkB,uDAAuD;aAEvE,MAAM,wBAAwB,GAAG,KAAK,EAChD,KAAI,QAAQ,EACV,OAAM,MAAA,YAAkB,8DAA8D;YAC7E,CAAC,gBAAgB;AAC1B,iBAAa,aAAa,MAAM;AAChC,qBAAiB;SAEjB,OAAM,MAAA,YAAkB,2DAA2D;OAGrF;;AAGJ,MAAI,aAAa,GAAG,EAAE;AACpB,QAAK,iBAAiB;AACtB,QAAK,aAAa;AAClB,QAAK,MAAA,QAAc,MAAM;;AAE3B,SAAM,OAAO,GAAE;AACb,QAAK,eAAe;AACpB,QAAK,aAAa;AAClB,QAAK,MAAA,QAAc,MAAM;AACzB,WAAO,CAAC,kBAAkB,KAAK,aAAa,eAAe,OAAA,IAAa;AACtE,SAAK;AACL,UAAA,QAAc,MAAM;AACpB,SAAK,MAAA,QAAc,MAAM;;AAE3B,OAAI,CAAC,kBAAkB,KAAK,aAAa,WACvC,cAAa,KAAK;AAEpB,OAAI,MAAM,GAAG,EAAE;AACb;AACA;;AAGF,OAAI,KAAK,aAAa,YAAY;AAEhC,QAAI,aAAa,cACf,WAAU,KAAK,OAAO,iBAAiB,IAAI,aAAa,WAAW;aAC1D,aAAa;SAClB,eAEF,WAAU;;AAGd;;AAGF,OAAI,QAEF,KAAI,aAAa,GAAG,EAAE;AACpB,qBAAiB;AAEjB,cAAU,KAAK,OAAO,iBAAiB,IAAI,aAAa,WAAW;cAE1D,gBAAgB;AACzB,qBAAiB;AACjB,cAAU,KAAK,OAAO,aAAa,EAAE;cAE5B,eAAe;QACpB,eAEF,WAAU;SAIZ,WAAU,KAAK,OAAO,WAAW;OAKnC,WAAU,KAAK,OAAO,iBAAiB,IAAI,aAAa,WAAW;AAErE,oBAAiB;AACjB,oBAAiB;AACjB,gBAAa;GACb,MAAM,eAAe,MAAA,QAAc;AACnC,UAAM,CAAC,MAAM,GAAG,IAAI,OAAO,GAAE;AAC3B,UAAA,QAAc,MAAM;AACpB,SAAK,MAAA,QAAc,MAAM;;GAE3B,MAAM,UAAU,KAAK,eAAe,cAAc,MAAA,QAAc,UAAU,MAAM;AAChF,OAAI,QAAS,WAAU;;AAEzB,MAAI,WAAW,KAAM,MAAK,UAA
U,IAAI,QAAQ,OAAO;AACvD,SAAO;GACL;GACA;GACA,MAAM;GACN;GACD;;CAEH,iBAAiB,KAAK,QAAQ,YAAY,YAAY;EACpD,MAAM,SAAS,EAAE;EACjB,MAAM,kCAAkB,IAAI,KAAK;EACjC,IAAI,eAAe;EACnB,IAAI;EACJ,IAAI;EACJ,IAAI,SAAS;EACb,IAAI,UAAU;EACd,IAAI,YAAY;EAChB,IAAI,gBAAgB;EACpB,IAAI,WAAW;AACf,MAAI,WAAW,KAAM,MAAK,UAAU,IAAI,QAAQ,OAAO;EACvD,IAAI,KAAK,MAAA,QAAc,MAAM;AAC7B,SAAM,OAAO,GAAE;GACb,MAAM,YAAY,MAAA,QAAc,KAAK,EAAE;AACvC,UAAO,KAAK;AACZ,SAAM,MAAA,QAAc;AAKpB,QAAK,OAAA,MAAmB,OAAA,OAAiB,kBAAkB,UAAU,EAAE;AACrE,QAAI,OAAA,IAAiB;AACnB,SAAI,eAAe;AACjB,WAAK,iBAAiB,QAAQ,iBAAiB,QAAQ,SAAS,KAAK;AACrE,eAAS;AACT,gBAAU;AACV,kBAAY;;AAEd,gBAAW;AACX,qBAAgB;AAChB,oBAAe;eACN,eAAe;AAExB,qBAAgB;AAChB,oBAAe;UAEf,OAAM,MAAA,YAAkB,2HAA2H;AAErJ,UAAA,QAAc,MAAM;AACpB,SAAK;UAIA;IACL,MAAM,WAAW,KAAK,YAAY;KAChC,cAAc;KACd,aAAa;KACb,aAAa;KACb,cAAc;KACf,CAAC;AACF,QAAI,CAAC,SAAU;AACf,QAAI,KAAK,SAAS,MAAM;AACtB,UAAK,MAAA,QAAc,MAAM;AACzB,UAAK,iBAAiB;AACtB,UAAK,MAAA,QAAc,MAAM;AACzB,SAAI,OAAA,IAAc;AAChB,YAAA,QAAc,MAAM;AACpB,WAAK,MAAA,QAAc,MAAM;AACzB,UAAI,CAAC,kBAAkB,GAAG,CACxB,OAAM,MAAA,YAAkB,6GAA6G;AAEvI,UAAI,eAAe;AACjB,YAAK,iBAAiB,QAAQ,iBAAiB,QAAQ,SAAS,KAAK;AACrE,gBAAS;AACT,iBAAU;AACV,mBAAY;;AAEd,iBAAW;AACX,sBAAgB;AAChB,qBAAe;AACf,eAAS,SAAS;AAClB,gBAAU,SAAS;gBACV,SACT,OAAM,MAAA,YAAkB,sDAAsD;UACzE;MACL,MAAM,EAAE,MAAM,WAAW;AACzB,aAAO;OACL;OACA;OACA;OACA;OACD;;eAEM,SACT,OAAM,MAAA,YAAkB,gFAAgF;SACnG;KACL,MAAM,EAAE,MAAM,WAAW;AACzB,YAAO;MACL;MACA;MACA;MACA;MACD;;;AAML,OAAI,KAAK,SAAS,QAAQ,KAAK,aAAa,YAAY;IACtD,MAAM,WAAW,KAAK,YAAY;KAChC,cAAc;KACd,aAAa;KACb,aAAa;KACb;KACD,CAAC;AACF,QAAI,SACF,KAAI,cACF,WAAU,SAAS;QAEnB,aAAY,SAAS;AAGzB,QAAI,CAAC,eAAe;AAClB,UAAK,iBAAiB,QAAQ,iBAAiB,QAAQ,SAAS,WAAW,MAAM,IAAI;AACrF,cAAS,UAAU,YAAY;;AAEjC,SAAK,oBAAoB,MAAM,GAAG;AAClC,SAAK,MAAA,QAAc,MAAM;;AAE3B,OAAI,KAAK,aAAa,cAAc,OAAO,EACzC,OAAM,MAAA,YAAkB,wDAAwD;YACvE,KAAK,aAAa,WAC3B;;AAOJ,MAAI,cACF,MAAK,iBAAiB,QAAQ,iBAAiB,QAAQ,SAAS,KAAK;AAGvE,MAAI,SAAU,QAAO;GACnB;GACA;GACA,MAAM;GACN;GACD;;CAEH,gBAAgB,KAAK;EACnB,IAAI,aAAa;EACjB,IAAI,UAAU;EACd,IAAI,YAAY;EAChB,IAAI;EACJ,IAAI,
KAAK,MAAA,QAAc,MAAM;AAC7B,MAAI,OAAA,GAAoB;AACxB,MAAI,QAAQ,KACV,OAAM,MAAA,YAAkB,0DAA0D;AAEpF,QAAA,QAAc,MAAM;AACpB,OAAK,MAAA,QAAc,MAAM;AACzB,MAAI,OAAA,IAAqB;AACvB,gBAAa;AACb,SAAA,QAAc,MAAM;AACpB,QAAK,MAAA,QAAc,MAAM;aAChB,OAAA,IAAoB;AAC7B,aAAU;AACV,eAAY;AACZ,SAAA,QAAc,MAAM;AACpB,QAAK,MAAA,QAAc,MAAM;QAEzB,aAAY;EAEd,IAAI,WAAW,MAAA,QAAc;AAC7B,MAAI,YAAY;AACd,MAAG;AACD,UAAA,QAAc,MAAM;AACpB,SAAK,MAAA,QAAc,MAAM;YACnB,OAAO,KAAK,OAAA;AACpB,OAAI,CAAC,MAAA,QAAc,KAAK,EAAE;AACxB,cAAU,MAAA,QAAc,OAAO,MAAM,UAAU,MAAA,QAAc,SAAS;AACtE,UAAA,QAAc,MAAM;AACpB,SAAK,MAAA,QAAc,MAAM;SAEzB,OAAM,MAAA,YAAkB,qDAAqD;SAE1E;AACL,UAAM,OAAO,KAAK,CAAC,kBAAkB,GAAG,EAAC;AACvC,QAAI,OAAA,GACF,KAAI,CAAC,SAAS;AACZ,iBAAY,MAAA,QAAc,OAAO,MAAM,WAAW,GAAG,MAAA,QAAc,WAAW,EAAE;AAChF,SAAI,CAAC,mBAAmB,KAAK,UAAU,CACrC,OAAM,MAAA,YAAkB,yEAAyE;AAEnG,eAAU;AACV,gBAAW,MAAA,QAAc,WAAW;UAEpC,OAAM,MAAA,YAAkB,0EAA0E;AAGtG,UAAA,QAAc,MAAM;AACpB,SAAK,MAAA,QAAc,MAAM;;AAE3B,aAAU,MAAA,QAAc,OAAO,MAAM,UAAU,MAAA,QAAc,SAAS;AACtE,OAAI,wBAAwB,KAAK,QAAQ,CACvC,OAAM,MAAA,YAAkB,gFAAgF;;AAG5G,MAAI,WAAW,CAAC,gBAAgB,KAAK,QAAQ,CAC3C,OAAM,MAAA,YAAkB,6DAA6D,QAAQ,GAAG;AAElG,MAAI,WACF,QAAO;WACE,KAAK,OAAO,IAAI,UAAU,CACnC,QAAO,KAAK,OAAO,IAAI,UAAU,GAAG;WAC3B,cAAc,IACvB,QAAO,IAAI;WACF,cAAc,KACvB,QAAO,qBAAqB;AAE9B,QAAM,MAAA,YAAkB,oDAAoD,UAAU,GAAG;;CAE3F,mBAAmB,QAAQ;EACzB,IAAI,KAAK,MAAA,QAAc,MAAM;AAC7B,MAAI,OAAA,GAAkB;AACtB,MAAI,WAAW,KACb,OAAM,MAAA,YAAkB,yDAAyD;AAEnF,QAAA,QAAc,MAAM;AACpB,OAAK,MAAA,QAAc,MAAM;EACzB,MAAM,WAAW,MAAA,QAAc;AAC/B,SAAM,OAAO,KAAK,CAAC,kBAAkB,GAAG,IAAI,CAAC,gBAAgB,GAAG,EAAC;AAC/D,SAAA,QAAc,MAAM;AACpB,QAAK,MAAA,QAAc,MAAM;;AAE3B,MAAI,MAAA,QAAc,aAAa,SAC7B,OAAM,MAAA,YAAkB,0FAA0F;AAEpH,SAAO,MAAA,QAAc,OAAO,MAAM,UAAU,MAAA,QAAc,SAAS;;CAErE,YAAY;AACV,MAAI,MAAA,QAAc,MAAM,KAAA,GAAe;AACvC,QAAA,QAAc,MAAM;EACpB,IAAI,KAAK,MAAA,QAAc,MAAM;EAC7B,MAAM,WAAW,MAAA,QAAc;AAC/B,SAAM,OAAO,KAAK,CAAC,kBAAkB,GAAG,IAAI,CAAC,gBAAgB,GAAG,EAAC;AAC/D,SAAA,QAAc,MAAM;AACpB,QAAK,MAAA,QAAc,MAAM;;AAE3B,MAAI,MAAA,QAAc,aAAa,SAC7B,OAAM,MAAA,YAAkB,oEAAoE;EAE9F,MAAM,QAAQ,MAAA,QAAc,O
AAO,MAAM,UAAU,MAAA,QAAc,SAAS;AAC1E,MAAI,CAAC,KAAK,UAAU,IAAI,MAAM,CAC5B,OAAM,MAAA,YAAkB,0CAA0C,MAAM,GAAG;AAE7E,OAAK,oBAAoB,MAAM,GAAG;AAClC,SAAO,KAAK,UAAU,IAAI,MAAM;;CAElC,WAAW,OAAO;AAChB,UAAO,MAAM,KAAb;GACE,KAAK;GACL,KAAK,IACH,QAAO;GACT,KAAK;AAED,SAAK,MAAM,QAAQ,KAAK,eAAc;AAIpC,SAAI,CAAC,KAAK,QAAQ,MAAM,OAAO,CAAE;KAEjC,MAAM,SAAS,KAAK,UAAU,MAAM,OAAO;AAC3C,WAAM,SAAS;AACf,WAAM,MAAM,KAAK;KACjB,MAAM,EAAE,WAAW;AACnB,SAAI,WAAW,KAAM,MAAK,UAAU,IAAI,QAAQ,OAAO;AACvD,YAAO;;AAET,WAAO;;EAGb,MAAM,OAAO,MAAM,QAAQ;EAE3B,MAAM,OADM,KAAK,QAAQ,MACR,IAAI,MAAM,IAAI;AAC/B,MAAI,CAAC,KACH,OAAM,MAAA,YAAkB,gCAAgC,MAAM,IAAI,GAAG;AAEvE,MAAI,MAAM,WAAW,QAAQ,KAAK,SAAS,MAAM,KAC/C,OAAM,MAAA,YAAkB,gCAAgC,MAAM,IAAI,uBAAuB,KAAK,KAAK,UAAU,MAAM,KAAK,GAAG;AAE7H,MAAI,CAAC,KAAK,QAAQ,MAAM,OAAO,CAE7B,OAAM,MAAA,YAAkB,gCAAgC,MAAM,IAAI,gBAAgB;EAEpF,MAAM,SAAS,KAAK,UAAU,MAAM,OAAO;AAC3C,QAAM,SAAS;EACf,MAAM,EAAE,WAAW;AACnB,MAAI,WAAW,KAAM,MAAK,UAAU,IAAI,QAAQ,OAAO;AACvD,SAAO;;CAET,YAAY,EAAE,cAAc,aAAa,aAAa,gBAAgB;EACpE,IAAI,eAAe;EACnB,IAAI,YAAY;EAChB,MAAM,oBAAoB,sBAAsB,eAAe,qBAAqB;EACpF,IAAI,wBAAwB;EAC5B,MAAM,mBAAmB;AACzB,MAAI;OACE,KAAK,oBAAoB,MAAM,GAAG,EAAE;AACtC,gBAAY;AACZ,mBAAe,gBAAgB,KAAK,YAAY,aAAa;;;EAGjE,IAAI,MAAM;EACV,IAAI,SAAS;AACb,MAAI,iBAAiB,EACnB,QAAM,MAAK;GACT,MAAM,SAAS,KAAK,gBAAgB,IAAI;AACxC,OAAI,OACF,OAAM;QACD;IACL,MAAM,YAAY,KAAK,mBAAmB,OAAO;AACjD,QAAI,CAAC,UAAW;AAChB,aAAS;;AAEX,OAAI,KAAK,oBAAoB,MAAM,GAAG,EAAE;AACtC,gBAAY;AACZ,4BAAwB;AACxB,mBAAe,gBAAgB,KAAK,YAAY,aAAa;SAE7D,yBAAwB;;AAI9B,MAAI,sBACF,yBAAwB,aAAa;AAEvC,MAAI,iBAAiB,GAAG;GAEtB,MAAM,aADO,oBAAoB,eAAe,qBAAqB,cAC3C,eAAe,eAAe;AACxD,OAAI,uBAAuB;IACzB,MAAM,cAAc,MAAA,QAAc,WAAW,KAAK;IAClD,MAAM,qBAAqB,KAAK,kBAAkB,KAAK,QAAQ,YAAY;AAC3E,QAAI,mBAAoB,QAAO,KAAK,WAAW,mBAAmB;IAClE,MAAM,oBAAoB,KAAK,iBAAiB,KAAK,QAAQ,aAAa,WAAW;AACrF,QAAI,kBAAmB,QAAO,KAAK,WAAW,kBAAkB;;GAElE,MAAM,sBAAsB,KAAK,mBAAmB,KAAK,QAAQ,WAAW;AAC5E,OAAI,oBAAqB,QAAO,KAAK,WAAW,oBAAoB;AACpE,OAAI,mBAAmB;IACrB,MAAM,mBAAmB,KAAK,gBAAgB,KAAK,QAAQ,WAAW;AACtE,QAAI,iBAAkB,QAAO,KAAK,WAAW,iBAAiB;;GAEhE,MAAM,mBAAmB
,KAAK,uBAAuB,KAAK,QAAQ,WAAW;AAC7E,OAAI,iBAAkB,QAAO,KAAK,WAAW,iBAAiB;GAC9D,MAAM,mBAAmB,KAAK,uBAAuB,KAAK,QAAQ,WAAW;AAC7E,OAAI,iBAAkB,QAAO,KAAK,WAAW,iBAAiB;GAC9D,MAAM,QAAQ,KAAK,WAAW;AAC9B,OAAI,OAAO;AACT,QAAI,QAAQ,QAAQ,WAAW,KAC7B,OAAM,MAAA,YAAkB,iEAAiE;AAE3F,WAAO,KAAK,WAAW;KACrB;KACA;KACA,MAAM;KACN,QAAQ;KACT,CAAC;;GAEJ,MAAM,mBAAmB,KAAK,gBAAgB,KAAK,QAAQ,YAAY,oBAAoB,YAAY;AACvG,OAAI,kBAAkB;AACpB,qBAAiB,QAAQ;AACzB,WAAO,KAAK,WAAW,iBAAiB;;aAEjC,iBAAiB,KAAK,sBAAsB,eAAe,uBAAuB;GAG3F,MAAM,cAAc,MAAA,QAAc,WAAW,KAAK;GAClD,MAAM,WAAW,KAAK,kBAAkB,KAAK,QAAQ,YAAY;AACjE,OAAI,SAAU,QAAO,KAAK,WAAW,SAAS;;EAEhD,MAAM,WAAW,KAAK,WAAW;GAC/B;GACA;GACA,MAAM;GACN,QAAQ;GACT,CAAC;AACF,MAAI,SAAS,QAAQ,QAAQ,SAAS,WAAW,KAAM,QAAO;;CAEhE,iBAAiB;EACf,IAAI,gBAAgB;EACpB,IAAI,UAAU;EACd,IAAI,KAAK,MAAA,QAAc,MAAM;AAC7B,SAAM,OAAO,GAAE;AACb,QAAK,oBAAoB,MAAM,GAAG;AAClC,QAAK,MAAA,QAAc,MAAM;AACzB,OAAI,KAAK,aAAa,KAAK,OAAA,GACzB;AAEF,mBAAgB;AAChB,SAAA,QAAc,MAAM;AACpB,QAAK,MAAA,QAAc,MAAM;GACzB,IAAI,WAAW,MAAA,QAAc;AAC7B,UAAM,OAAO,KAAK,CAAC,kBAAkB,GAAG,EAAC;AACvC,UAAA,QAAc,MAAM;AACpB,SAAK,MAAA,QAAc,MAAM;;GAE3B,MAAM,gBAAgB,MAAA,QAAc,OAAO,MAAM,UAAU,MAAA,QAAc,SAAS;GAClF,MAAM,gBAAgB,EAAE;AACxB,OAAI,cAAc,SAAS,EACzB,OAAM,MAAA,YAAkB,wEAAwE;AAElG,UAAM,OAAO,GAAE;AACb,SAAK,iBAAiB;AACtB,SAAK,aAAa;AAClB,SAAK,MAAA,QAAc,MAAM;AACzB,QAAI,MAAM,GAAG,CAAE;AACf,eAAW,MAAA,QAAc;AACzB,WAAM,OAAO,KAAK,CAAC,kBAAkB,GAAG,EAAC;AACvC,WAAA,QAAc,MAAM;AACpB,UAAK,MAAA,QAAc,MAAM;;AAE3B,kBAAc,KAAK,MAAA,QAAc,OAAO,MAAM,UAAU,MAAA,QAAc,SAAS,CAAC;;AAElF,OAAI,OAAO,EAAG,MAAK,eAAe;AAClC,WAAO,eAAP;IACE,KAAK;AACH,SAAI,YAAY,KACd,OAAM,MAAA,YAAkB,+DAA+D;AAEzF,eAAU,KAAK,qBAAqB,cAAc;AAClD;IACF,KAAK;AACH,UAAK,oBAAoB,cAAc;AACvC;IACF;AACE,UAAK,gBAAgB,+BAA+B,cAAc,GAAG;AACrE;;AAEJ,QAAK,MAAA,QAAc,MAAM;;AAE3B,SAAO;;CAET,eAAe;EACb,MAAM,gBAAgB,MAAA,QAAc;AACpC,OAAK,kBAAkB;AACvB,OAAK,yBAAS,IAAI,KAAK;AACvB,OAAK,4BAAY,IAAI,KAAK;EAC1B,MAAM,gBAAgB,KAAK,gBAAgB;AAC3C,OAAK,oBAAoB,MAAM,GAAG;EAClC,IAAI,SAAS;AACb,MAAI,KAAK,eAAe,KAAK,MAAA,QAAc,MAAM,KAAA,MAAc,MAAA,QAAc,KAAK,EAAE,KAAA,MAAc,MAAA,QAA
c,KAAK,EAAE,KAAA,IAAY;AACjI,SAAA,QAAc,YAAY;AAC1B,QAAK,oBAAoB,MAAM,GAAG;aACzB,cACT,OAAM,MAAA,YAAkB,wDAAwD;EAElF,MAAM,WAAW,KAAK,YAAY;GAChC,cAAc,KAAK,aAAa;GAChC,aAAa;GACb,aAAa;GACb,cAAc;GACf,CAAC;AACF,MAAI,SAAU,UAAS,SAAS;AAChC,OAAK,oBAAoB,MAAM,GAAG;AAClC,MAAI,KAAK,mBAAmB,8BAA8B,KAAK,MAAA,QAAc,OAAO,MAAM,eAAe,MAAA,QAAc,SAAS,CAAC,CAC/H,MAAK,gBAAgB,mDAAmD;AAE1E,MAAI,MAAA,QAAc,aAAa,KAAK,aAAa,KAAK,uBAAuB;OACvE,MAAA,QAAc,MAAM,KAAA,IAAU;AAChC,UAAA,QAAc,YAAY;AAC1B,SAAK,oBAAoB,MAAM,GAAG;;aAE3B,CAAC,MAAA,QAAc,KAAK,CAC7B,OAAM,MAAA,YAAkB,8EAA8E;AAExG,SAAO;;CAET,CAAC,gBAAgB;AACf,SAAM,CAAC,MAAA,QAAc,KAAK,CACxB,OAAM,KAAK,cAAc;;;;;AC5yC/B,SAAS,cAAc,OAAO;AAC5B,SAAQ,OAAO,MAAM;AACrB,KAAI,MAAM,SAAS,GAAG;AAEpB,MAAI,CAAC,MAAM,MAAM,WAAW,MAAM,SAAS,EAAE,CAAC,CAAE,UAAS;AAEzD,MAAI,MAAM,WAAW,EAAE,KAAK,MAAQ,SAAQ,MAAM,MAAM,EAAE;;AAE5D,QAAO;;;;;;;;;;;;;;;;;;;;;;;;GAwBL,SAAgB,MAAM,SAAS,UAAU,EAAE,EAAE;AAC/C,WAAU,cAAc,QAAQ;CAKhC,MAAM,oBAJQ,IAAI,YAAY,SAAS;EACrC,GAAG;EACH,QAAQ,WAAW,IAAI,QAAQ,OAAO;EACvC,CAAC,CAC8B,eAAe;CAC/C,MAAM,WAAW,kBAAkB,MAAM,CAAC;AAC1C,KAAI,CAAC,kBAAkB,MAAM,CAAC,KAC5B,OAAM,IAAI,YAAY,uEAAuE;AAE/F,QAAO,YAAY;;;;;;;;GC/CjB,IAAI,eAqUO,OAAO,IAAI,qBAAqB;AAC/C,IAAa,gBAAb,MAAa,sBAAsB,OAAO;CACxC;CACA;CACA;CACA;CACA;CACA,YAAY,MAAM,UAAU,eAAe,QAAQ,SAAQ;EACzD,MAAM,mBAAmB,EAAE;AAC3B,OAAI,IAAI,IAAI,GAAG,IAAI,SAAS,IAAI,QAAQ,IACtC,kBAAiB,KAAK,SAAS,IAAI,GAAG,QAAQ,aAAa,KAAK,CAAC;EAEnE,MAAM,MAAM,OAAO,IAAI,EACrB,KAAK,kBACN,EAAE,GAAG,cAAc;EACpB,IAAI,MAAM,IAAI,UAAU;AAExB,MAAI,OAAO,SAAS,WAAW,SAC7B,KAAI,QAAQ,SAAS,GAAG;GACtB,MAAM,SAAS,IAAI,OAAO,QAAQ,OAAO;AACzC,SAAM,IAAI,QAAQ,eAAe,OAAO;QAExC,OAAM,IAAI,QAAQ,WAAW,UAAQ;AACnC,UAAO,MAAM,MAAM,GAAG,MAAM,SAAS,QAAQ,OAAO;IACpD;AAGN,MAAI,SAAS,WAAW,OAAO;GAE7B,MAAM,QAAQ,IAAI,MAAM,KAAK;GAE7B,MAAM,UADe,MAAM,MAAM,SAAO,KAAK,MAAM,KAAK,GAAG,IAAI,IACnC,MAAM,OAAO,GAAG,IAAI,UAAU;AAC1D,SAAM,MAAM,KAAK,SAAO,KAAK,MAAM,OAAO,CAAC,CAAC,KAAK,KAAK;;AAExD,QAAM,IAAI;AACV,OAAK,OAAO;AACZ,OAAK,MAAM;AACX,OAAK,SAAS;;CAEhB,CAAC,gBAAgB;AACf,SAAO,KAAK,SAAS;;CAEvB,OAAO,OAAO;AACZ,SAAO,IAAI,cAAc,KAAK,MAAM
,EAClC,KAAK,CACH,KAAK,IACN,EACF,EAAE,EAAE,EAAE,KAAA,GAAW,EAChB,QAAQ,OACT,CAAC;;CAEJ,WAAW;AACT,SAAO,KAAK,OAAO,MAAM;;CAE3B,QAAQ;AACN,MAAI,KAAK,MACP,OAAM,KAAK;AAEb,SAAO;;CAET,MAAM,QAAQ;AACZ,OAAK,SAAS,UAAU,KAAK;AAC7B,MAAI;AACF,QAAK,OAAO,KAAK,SAAS,KAAK,IAAI;WAC5B,OAAO;AACd,QAAK,QAAQ;;AAEf,SAAO;;;;;;;;GAQP,SAAgB,IAAI,MAAM,QAAQ,SAAS;AAC7C,SAAQ,UAAU,GAAG,kBAAgB,IAAI,cAAc,MAAM,UAAU,eAAe,QAAQ,QAAQ;;AAIxG,MAAa,OAAO,IAAI,OAAO;AAC/B,MAAa,MAAM,IAAI,MAAM;AAC7B,MAAa,KAAK,IAAI,KAAK;AAC3B,MAAaI,OAAK,IAAI,KAAK;AACR,IAAI,MAAM;AAC7B,MAAa,MAAM,IAAI,MAAM;AAE7B,MAAa,OAAO,IAAI,QAAQ,KAAK,MAAM;AACxB,IAAI,MAAM;AAC7B,MAAa,OAAO,IAAI,QAAQC,MAAW;AACvB,IAAI,OAAO;AACZ,IAAI,MAAM;AACV,IAAI,MAAM;AAE7B,MAAa,KAAK,IAAI,KAAK;AACR,IAAI,MAAM;AACV,IAAI,MAAM;AAEV,IAAI,MAAM;AACN,IAAI,UAAU;AAEnB,IAAI,KAAK;AACR,IAAI,MAAM;AACV,IAAI,MAAM;AAEX,IAAI,KAAK;AACT,IAAI,KAAK;AACT,IAAI,KAAK;AACT,IAAI,KAAK;AACV,IAAI,IAAI;AACN,IAAI,MAAM;AACX,IAAI,KAAK;AACP,IAAI,OAAO;AACZ,IAAI,MAAM;AACR,IAAI,QAAQ;AACf,IAAI,KAAK;AACN,IAAI,QAAQ;AACb,IAAI,OAAO;AACZ,IAAI,MAAM;AACX,IAAI,KAAK;AACV,IAAI,IAAI;AACN,IAAI,MAAM;AACX,IAAI,KAAK;AACR,IAAI,MAAM;AACX,IAAI,KAAK;AAED,IAAI,aAAa;AACnB,IAAI,WAAW;AACrB,IAAI,KAAK;AAER,IAAI,MAAM;AACT,IAAI,OAAO;AACV,IAAI,QAAQ;AACd,IAAI,MAAM;;;AC5c7B,MAAa,6BAA6B,GAAG;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;EAuD3C;;;ACvDF,MAAa,qCACX,GAAG;;;;;;;;;;;;;;;;;;;;;EAqBH;;;ACtBF,MAAa,4BAA4B,GAAG;;;;;;;;;;;;;;;;;;;;;;;;;;;;;EA6B1C;;;AC7BF,MAAa,yBAAyB,GAAG;;;;;;;;;;;;;;;;EAgBvC;;;AChBF,MAAa,6BACX,GAAG;;;;;;;;;;;;;;;;;;;;;;;;;;;EA2BH;;;AC5BF,MAAa,+BACX,GAAG;;;;;;;;;;;;;;;;;;;;;;;;;EAyBH;;;AC1BF,MAAa,yBAAyB,GAAG;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;EAyGvC;;;ACzGF,MAAa,uDACX,GAAG;;;;;;;;;;;EAWH;;;ACZF,MAAa,yBAAyB,MAIpC,IAAE;;;;0BAIsB,EAAE,2BAA2B;0BAC7B,EAAE,2BAA2B;;EAErD;;;ACXF,MAAa,8BACX,GAAG;;;;;;;;;;;;;;;;;;;;;EAqBH;;;ACtBF,MAAa,iBAAiB,EAAE;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;EAshB9B;;;ACthBF,MAAa,8BAA8B,IAAI;;;;;;;;;;;;;;;EAe7C;;;ACfF,MAAa,oBAAoB,IAAI;;;;;;;;;EASnC;;;ACXF,MAAa,4BAA4B;;;;;;;;;;;;;;;;;;;ACAzC,MAAa,qBAAqB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;ACAlC,MAAa,oBAAoB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;ACAjC,MAAa,gCAAgC;;;;;;;;;;;;;;;;;;;;ACE7C,MAAa,yBAAyB,IAAE;;;;;;;;EAQtC;;;ACVF,MAAa,8BAA8B;;;;;;;;ACE3C,MAAa,2BAA2B,IAAE;;;;;;;;EAQxC;;;ACRF,MAAa,kBAAkB,IAAE;;;;;;;;EAQ/B;;;ACVF,MAAa,uBAAuB;;;;;;;;ACEpC,MAAa,uBAAuB,EAAE;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;EAyIpC;;;ACzIF,MAAa,yBAAyB,IAAI;;;;;;;;;EASxC;;;ACTF,MAAa,iCAAiC,IAAI;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;EAqXhD;;;ACvXF,MAAa,oBAAoB;;;;;;;;;;;;;;;;;;ACEjC,MAAa,oBAAoB,KAAA,sBAAA,oBAAA,uBAAA,CAAA,0oCAAA,CAAA,EAuBtB,CAAC;;;;ACvBZ,MAAa,0BAA0B,KAAA,oBAAA,kBAAA,uBAAA,CAAA,67CAAA,CAAA,EAiCtC,CAAC;;;ACjCF,MAAa,kBAAkB,IAAE;;;;;;;;;;;;EAY/B;;;ACdF,MAAa,kBAAkB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;ACE/B,MAAa,kBAAkB,GAAG;;;;;;;;;;;;;;;;;EAiBhC;;;ACjBF,MAAa,cAAc,IAAI;;;;;;;;;EAS7B;;;ACXF,MAAa,gBAAgB;;;ACE7B,MAAa,qBAAqB;CAChC,KAAK;EACH,OAAO;EACP,SAAS;EACT,MAAM;EACP;CACD,qBAAqB;EACnB,OAAO;EACP,SAAS;EACT,MAAM;EACP;CACD,uBAAuB;EACrB,OAAO;EACP,SAAS;EACT,MAAM;EACP;CACD,aAAa;EACX,OAAO;EACP,SAAS;EACT,MAAM;EACP;CACD,eAAe;EACb,OAAO;EACP,SAAS;EACT,MAAM;EACP;CACD,eAAe;EACb,OAAO;EACP,SAAS;EACT,MAAM;EACP;CACD,gBAAgB;EACd,OAAO;EACP,SAAS;EACT,MAAM;EACP;CACD,cAAc;CACd,eAAe;CAChB;AAED,MAAa,iBAAiB;CAC5B,cAAc;CACd,MAAM;CACN,YAAY;CACZ,KAAK;CACL,aAAa;CACb,UAAU;CACV,SAAS;CACT,OAAO;CACP,SAAS;CACT,SAAS;CACT,OAAO;CACP,mBACE;CACF,qBACE;CACH;AAED,MAAa,uBAAuB;CAClC,uCAAuC;CACvC,SAAS;CACT,eAAe;CACf,KAAK;CACL,OAAO;CACP,aAAa;CACd;AAED,MAAa,0BAA0B;CACrC,wBAAwB;CACxB,8BAA8B;CAC9B,cAAc;CACd,oBAAoB;CACpB,eAAe;CACf,gBAAgB;CAChB,wBAAwB;CACxB,QAAQ;CACR,0BAA0B;CAC1B,0BAA0B;CAC1B,uBAAuB;CACvB,6BAA6B;CAC7B,SAAS;CACT,aAAa;CACb,YAAY;CACZ,qBAAqB;CACrB,MAAM;CACN,uBAAuB;CACvB,QAAQ;CACT;AAED,MAAa,kBAA4B;CACvC,MAAM;CACN,aAAa;CACb,UAAU;CACV,WAAW;EACT,MAAM;EACN,KAAK;EACN;CACD,gBAAgB,EAAE;CAClB,SAAS,EAAE;CACX,MAAM,EAAE;CACR,WAAW,EAAE;CACb,YAAY,EAAE;CACf;;;;;;AC9FD,SAAS,cAAc,OAAuB;AAC5C,QAAO,KAAK,UAAU,OAAO,MAAM,EAAE,CAAC,MAAM,GAAG,GAAG,CAAC,SAAS;;AAG9D,MAAa,kBAAkB,cAAc,mBAAmB;AAEhE,MAAM,uBAAuB,cAAc,qBAAqB;AAChE,MAAM,0BAA0B,cAAc,wBAAwB;AAEtE,MAAM,kBAAkB,IAAI;;;;;;;;;;;;;;;;EAgB1B;AAEF,MAAM,wBAAwB,0BAC5B,IAAI;IACF,sBAAsB,KAAK,QAAQ,CAAC;EACtC,qBAAqB;EACrB;AAEF,MAAM,2BAA2B,6BAC/B,IAAI;IACF,yBAAyB,KAAK,QAAQ,
CAAC;EACzC,wBAAwB;EACxB;AAEF,MAAa,uBACX,aACA,uBACA,6BAEA,IAAI;;aAEO,YAAY;;;;;;;;;MASnB,gBAAgB;;;MAGhB,gBAAgB;;;MAGhB,qBAAqB,sBAAsB,CAAC;;;MAG5C,wBAAwB,yBAAyB,CAAC;;;EAGtD;;;AC7EF,MAAa,6BAA6B;CACxC,OAAO;CACP,MAAM;CACN,cAAc;CACd,MAAM;CACN,YAAY;CACZ,KAAK;CACL,aAAa;CACb,UAAU;CACV,gBAAgB;CAChB,0BAA0B;CAC1B,UAAU;CACV,SAAS;CACT,SAAS;CACT,SAAS;CACT,OAAO;CACP,SAAS;CACT,mBACE;CACF,qBACE;CACH;AAED,MAAa,6BAA6B;CACxC,KAAK;EACH,OAAO;EACP,SAAS;EACV;CACD,qBAAqB;EACnB,OAAO;EACP,SAAS;EACV;CACD,aAAa;EACX,OAAO;EACP,SAAS;EACV;CACD,uBAAuB;EACrB,OAAO;EACP,SAAS;EACV;CACD,eAAe;EACb,OAAO;EACP,SAAS;EACV;CACD,eAAe;EACb,OAAO;EACP,SAAS;EACV;CACD,gBAAgB;EACd,OAAO;EACP,SAAS;EACV;CACD,cAAc,EACZ,SAAS,mCACV;CACD,eAAe;CAChB;;;ACtDD,SAAS,qBAAqB,eAAmC;AAC/D,KAAI,CAAC,cAAe,QAAO;AAE3B,QAAO,IAAI;;;kBADK,cAAc,MAAM,IAAI,CAAC,KAAK,IAAI,GAI1B;mBACP,cAAc;;EAE/B;;AAGF,eAAsB,8BAA8B,MAIjD;AAED,QAAO,IAAI;;;;;;;;;;;;;;2BAcc,qBAAqB,GAfrB,qBAAqB,KAAK,YAAY,CAeG;;EAElE;;;;AClCF,MAAa,oBAAoB,gBAC/B,IAAI;;eAES,YAAY;;;;;;;;;;;;;;;EAezB;;;AClBF,MAAa,iBAAiB,EAAE;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;EA+L9B;;;AC/LF,MAAa,gBAAgB,GAAG;;;;;;;;;;;;;;;EAe9B;;;ACjBF,MAAa,yBAAyB;;;;;;;;ACEtC,MAAa,wBAAwB,IAAI;;;;;;;;;;;;;;;;;;;;;GAqBtC;AAEH,MAAa,mBAAmB,IAAI;;;;;;;QAO5B,sBAAsB;;;;;;;;;;;;;;;;;;;;EAoB5B;;;AClDF,MAAa,uBAAuB,IAAE;;;;;;;;;;;;EAYpC;;;ACZF,SAAS,0BAA0B,YAAyB;CAC1D,MAAM,uBAAgE,EAAE;AAExE,MAAK,MAAM,aAAa,WACtB,KAAI,CAAC,qBAAqB,UAAU,UAClC,sBAAqB,UAAU,YAAY,CAAC,UAAU;KAEtD,sBAAqB,UAAU,WAAW,KAAK,UAAU;AAI7D,QAAO;;AAGT,SAAS,oBAAoB,kBAAqC;CAChE,MAAM,eAAe,iBAAiB,KAAK,EAAE,WAAW,KAAK;CAC7D,MAAM,yBAAmC,EAAE;AAE3C,MAAK,MAAM,QAAQ,aACjB,wBAAuB,KACrB,MAAM,YAAY,KAAK,CAAC,KAAK,UAAU,KAAK,CAAC,KAC9C;AAGH,QAAO,uBAAuB,KAAK,GAAG;;AAGxC,SAAS,wBAAwB,UAAoB;AAQnD,QAPwB,SAAS,QAC9B,MAAM,MAAM,cAAc,MAAM,gBAClC,CACiC,KAAK,MAAM;EAC3C,MAAM,CAAC,OAAO,GAAG,QAAQ,EAAE,MAAM,IAAI,CAAC,KAAK,MAAM,EAAE,MAAM,CAAC;AAC1D,SAA
O,KAAK,MAAM,QAAQ,KAAK,KAAK,GAAG,CAAC;GACxC;;AAIJ,SAAS,uBAAuB,UAAoB;AAClD,KACE,SAAS,SAAS,WAAW,IAC7B,SAAS,SAAS,gBAAgB,IAClC,SAAS,MAAM,MAAM,EAAE,SAAS,UAAU,CAAC,CAE3C,QAAO;AACT,QAAO;;AAGT,SAAS,qBAAqB,OAAe;AAC3C,KAAI,MAAM,SAAS,KAAK,CAEtB,QAAO,QADc,YAAY,MAAM,MAAM,IAAI,CAAC,GAAG,QAAQ,MAAM,GAAG,CAAC;AAMzE,QAAO,QAHsB,YAC3B,MAAM,QAAQ,KAAK,GAAG,CAAC,QAAQ,KAAK,GAAG,CACxC;;AAGH,SAAS,yBAAyB,WAAsB;CACtD,MAAM,EAAE,UAAU,aAAa,UAAU;AAKzC,QAAO,GAJS,qBAAqB,MAAM,CAIzB,GAHS,uBAAuB,SAAS,CAGnB;EACxC,YAAY;eACC,MAAM;EAJS,wBAAwB,SAAS,CAKzC,KAAK,OAAO,CAAC;;;AAInC,SAAS,0BAA0B,YAAyB;AAC1D,QAAO,WAAW,IAAI,yBAAyB,CAAC,KAAK,GAAG;;AAG1D,SAAS,uCAAuC,YAAyB;CACvE,MAAM,uBAAuB,0BAA0B,WAAW;CAClE,MAAM,iBAA2B,EAAE;AAEnC,MAAK,MAAM,CAAC,UAAU,eAAe,OAAO,QAAQ,qBAAqB,EAAE;EACzE,MAAM,gBAAgB,0BAA0B,cAAc,EAAE,CAAC;AAEjE,iBAAe,KACb,OAAO,SAAS;EACpB,cAAc;EAEX;;AAGH,QAAO,eAAe,KAAK,GAAG;;AAGhC,SAAS,eAAe,iBAAkC;CACxD,MAAM,EAAE,MAAM,aAAa,eAAe;AAC1C,QAAO,MAAM,YAAY,KAAK,CAAC;EAC/B,YAAY;EACZ,uCAAuC,WAAW;;AAGpD,SAAS,gBAAgB,kBAAqC;AAC5D,QAAO,iBAAiB,IAAI,eAAe,CAAC,KAAK,GAAG;;AAGtD,MAAa,2BAA2B,MAMtC,KAAK,EAAE,UAAU;EACjB,EAAE,iBAAiB;EACnB,EAAE,eAAe;;EAEjB,oBAAoB,EAAE,iBAAiB,CAAC;EACxC,gBAAgB,EAAE,iBAAiB,CAAC;;;;ACjHtC,MAAa,oCACX,MAIA,GAAG;;WAEM,EAAE,4BAA4B,oBAAoB,EAAE,wBAAwB;;;iCAGtD,EAAE,4BAA4B;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;EA6E7D;;;ACxFF,MAAa,oCAAoC,MAM/C,GAAG;;;;;;;;sCAQiC,EAAE,cAAc;eACvC,EAAE,qBAAqB;;qBAEjB,EAAE,cAAc;;eAEtB,EAAE,SAAS;iBACT,EAAE,WAAW;;;EAG5B;;;ACpBF,SAAS,uBACP,QACA,uBACA;CACA,MAAM,sBAAsB,UAAU,OAAO,KAAK;AAClD,QAAO,GAAG,wBAAwB,oBAAoB,OAAO,EAAE,CAAC,aAAa,GAAG,oBAAoB,MAAM,EAAE,CAAC;;AAG/G,SAAS,wBACP,SACA,uBACA;AACA,QAAO,QAAQ,KAAK,MAAM,uBAAuB,GAAG,sBAAsB,CAAC;;AAG7E,SAAS,0BACP,SACA,uBACA;AAKA,QAAO,YAJa,wBAClB,SACA,sBACD,CAAC,KAAK,MAAM,CACkB;;AAGjC,SAAS,+BACP,SACA,uBACA;AAOA,QAAO;2CANmB,wBACxB,SACA,sBACD,CACE,KAAK,MAAM,MAAM,IAAI,CACrB,KAAK,MAAM,CAE6C;;AAE7D,MAAa,wCACX,MAEA,IAAE;;;;;;EAMF,0BAA0B,EAAE,cAAc,SAAS,EAAE,sBAAsB,CAAC;;sBAExD,EAAE,uBAAuB;EAC7C,+BAA+B,EAAE,cAAc,SAAS,
EAAE,sBAAsB,CAAC;EACjF;;;ACpDF,SAAS,4BACP,QACA,wBACA;AAGA,QAAO,iBAAiB,yBAFK,WAAW,OAAO,KAAK,CAEkB,mBAD1C,UAAU,OAAO,KAAK,CAC2D;;AAE/G,SAAS,6BACP,SACA,wBACA;AACA,QAAO,QACJ,KAAK,WACJ,4BAA4B,QAAQ,uBAAuB,CAC5D,CACA,KAAK,KAAK;;AAGf,SAAS,4BAA4B,QAA6B;AAEhE,QAAO,oBADqB,UAAU,OAAO,KAAK,CACH;;AAGjD,SAAS,6BAA6B,SAAgC;AACpE,QAAO,QAAQ,IAAI,4BAA4B,CAAC,KAAK,KAAK;;AAG5D,SAAS,yBACP,QACA,wBACA;AAEA,QAAO,GAAG,yBADmB,WAAW,OAAO,KAAK,CACI;;AAG1D,SAAS,2BACP,SACA,wBACA;AACA,QAAO,QACJ,KAAK,WAAW,yBAAyB,QAAQ,uBAAuB,CAAC,CACzE,KAAK,MAAM;;AAGhB,SAAS,uBACP,SACA,wBACA;AAKA,QAAO,eAAe,uBAAuB,WAJrB,2BACtB,SACA,uBACD,CACuE;;AAE1E,MAAa,uCACX,MAEA,IAAE;;;;;EAKF,6BAA6B,EAAE,cAAc,SAAS,EAAE,uBAAuB,CAAC;;EAEhF,6BAA6B,EAAE,cAAc,QAAQ,CAAC;;EAEtD,uBAAuB,EAAE,cAAc,SAAS,EAAE,uBAAuB,CAAC;EAC1E;;;ACtEF,MAAa,0CACX,MAEA,IAAE;;;;;;WAMO,EAAE,uBAAuB;gBACpB,EAAE,eAAe,IAAI,EAAE,YAAY;;eAEpC,EAAE,uBAAuB;IACpC,EAAE,YAAY;IACd,EAAE,eAAe;IACjB,EAAE,uBAAuB;EAC3B;;;ACdF,SAAS,0BACP,QACA,uBACA;CACA,MAAM,sBAAsB,UAAU,OAAO,KAAK;CAClD,MAAM,sBAAsB,UAAU,OAAO,KAAK;CAClD,MAAM,gBAAgB,GAAG,wBAAwB,oBAAoB,OAAO,EAAE,CAAC,aAAa,GAAG,oBAAoB,MAAM,EAAE,CAAC;AAI5H,QAAO,CAHsB,oBAAoB,oBAAoB,iBAC/B,eAAe,cAAc,WAAW,oBAAoB,gBAEtC;;AAE9D,SAAS,qBACP,SACA,uBACA;AACA,QAAO,QACJ,SAAS,WACR,0BAA0B,QAAQ,sBAAsB,CACzD,CACA,KAAK,KAAK;;AAEf,MAAa,wCACX,MAEA,IAAE;;;;;EAKF,qBAAqB,EAAE,cAAc,SAAS,EAAE,sBAAsB,CAAC;EACvE;;;ACjCF,MAAa,2CACX,MAEA,IAAE;;;;;;;;;;WAUO,EAAE,yBAAyB;WAC3B,EAAE,gBAAgB;gBACb,EAAE,mBAAmB,IAAI,EAAE,YAAY;;mDAEJ,EAAE,uBAAuB;eAC7D,EAAE,mBAAmB;4BACR,EAAE,yBAAyB;;;kDAGL,EAAE,uBAAuB;eAC5D,EAAE,YAAY;YACjB,EAAE,gBAAgB;;;eAGf,EAAE,qBAAqB;YAC1B,EAAE,mBAAmB;WACtB,EAAE,YAAY;kBACP,EAAE,YAAY;;;6DAG6B,EAAE,uBAAuB;kBACpE,EAAE,4BAA4B;;cAElC,EAAE,YAAY;WACjB,EAAE,YAAY;;;yEAGgD,EAAE,uBAAuB;kBAChF,EAAE,kCAAkC;;sBAEhC,EAAE,YAAY;IAChC,EAAE,YAAY;;;yDAGuC,EAAE,uBAAuB;kBAChE,EAAE,+BAA+B;;iBAElC,EAAE,mBAAmB;WAC3B,EAAE,qBAAqB;;;4DAG0B,EAAE,uBAAuB;kBACnE,EAAE,qCAAqC;;yBAEhC,EAAE,mBAAmB;IAC1C,EAAE,qBAAqB;;EAEzB;;;AC5DF,MAAa,qCACX,MAEA,IAAE;;;;;eAKW,EAAE,yBAAyB,MAAM,EAAE,mBAAmB,GAAG;
EACtE;;;ACPF,SAAS,6BAA6B,QAA6B;AAEjE,QAAO,oBADe,UAAU,OAAO,KAAK,CACH;;AAG3C,SAAS,8BAA8B,SAAgC;AACrE,QAAO,QAAQ,IAAI,6BAA6B,CAAC,KAAK,KAAK;;AAG7D,MAAa,qCACX,MAEA,IAAE;;;;;;;;;;UAUM,EAAE,mBAAmB;;;;;;;;;;;;EAY7B,8BAA8B,EAAE,cAAc,QAAQ,CAAC;EACvD;;;AClCF,SAAgB,kBAAkB,WAAmC;AACnE,KAAI,CAAC,UAAU,KAAM;AACrB,QAAO,GAAG,WAAW,UAAU,KAAK,CAAC;;AAGvC,SAAgB,mBAAmB,WAAmC;AACpE,KAAI,CAAC,UAAU,KAAM;AACrB,KAAI,CAAC,kBAAkB,UAAU,CAAE;AACnC,QAAO,GAAG,WAAW,UAAU,KAAK,CAAC;;AAGvC,SAAgB,cAAc,WAAmC;AAC/D,KAAI,CAAC,UAAU,KAAM;AACrB,QAAO,aAAa,UAAU,KAAK;;AAGrC,SAAgB,wBACd,MACA;AACA,QAAO,KAAK,OAAO,WAAW,IAAI,mBAAmB,CAAC,KAAK,MAAM;;;;ACfnE,SAAgB,uBAAuB,EACrC,iCACoC;AACpC,QAAO;EACL;EACA,sBAAsB,UAAU,8BAA8B;EAC9D,6BAA6B,cAAc;EAC3C,0BAA0B,GAAG,8BAA8B;EAC3D,sBAAsB;EACtB,wBAAwB,cAAc,8BAA8B;EACpE,yBAAyB,oBAAoB,8BAA8B;EAC3E,mBAAmB;EACnB,uBAAuB;EACvB,0BAA0B;EAC1B,6BAA6B,cAAc,8BAA8B;EAC1E;;AAGH,SAAgB,wBACd,oBACA,iBACA;AACA,KAAI,gBAAiB,QAAO;AAC5B,QAAO,UAAU,mBAAmB,KAAK;;AAG3C,SAAgB,2BAA2B,EACzC,kBAC2B;AAC3B,QAAO,eAAe,eAAe,SAAS;;AAGhD,SAAgB,oCACd,EAAE,kBACF,SACuB;CACvB,MAAM,uBAAuB,eAAe;CAC5C,MAAM,wBAAwB,eAAe,MAC1C,SAAS,KAAK,YAAY,QAC5B;AACD,KAAI,CAAC,uBAAuB;AAC1B,UAAQ,MACN,qCAAqC,QAAQ,4DAC9C;AACD,SAAO;;AAET,KAAI,qBAAqB,YAAY,sBAAsB,SAAS;AAClE,UAAQ,MACN,8BAA8B,QAAQ,gDAAgD,UACvF;AACD,SAAO;;AAGT,QAAO;;AAGT,SAAgB,wCACd,oBACA;AACA,QAAO,2BAA2B,mBAAmB,CAAC;;AAGxD,SAAgB,8BAA8B,mBAA2B;CACvE,MAAM,wBAAwB,UAAU,kBAAkB;CAC1D,MAAM,yBAAyB,WAAW,kBAAkB;CAC5D,MAAM,wBAAwB,UAAU,kBAAkB;CAC1D,MAAM,2BAA2B,GAAG,sBAAsB;CAC1D,MAAM,YAAY,GAAG,uBAAuB;CAC5C,MAAM,kBAAkB,GAAG,uBAAuB;CAClD,MAAM,iBAAiB,GAAG,uBAAuB;CACjD,MAAM,cAAc,GAAG,uBAAuB;CAC9C,MAAM,qBAAqB,GAAG,uBAAuB;CACrD,MAAM,iBAAiB,GAAG,uBAAuB;CACjD,MAAM,kBAAkB,GAAG,eAAe;CAC1C,MAAM,cAAc,UAAU,gBAAgB;AAc9C,QAAO;EACL;EACA;EACA;EACA;EACA,qBAlB0B,GAAG,sBAAsB;EAmBnD;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA,iBAzBsB,GAAG,UAAU;EA0BnC,sBAzB2B,GAAG,mBAAmB;EA0BjD,6BAzBkC,KAAK;EA0BvC,mCAzBwC,WAAW;EA0BnD,gCAzBqC,KAAK;EA0B1C,sCAzB2C,WAAW;EA0BtD,iBAzBsB,MAAM,mBAAmB;EA0B/C,qBAzB0B,cAAc;EA0BxC,4BAzBiC,MAAM,mBAAmB
;EA0B1D,6BAzBkC,MAAM,mBAAmB;EA0B5D;;;;AC9GH,SAAS,oBAAoB,WAAmC;CAC9D,MAAM,qBAAqB,uBAAuB,UAAU,GACxD,yBACA;CACJ,MAAM,iBAAiB,kBAAkB,UAAU;CACnD,MAAM,kBAAkB,mBAAmB,UAAU,IAAI;AAGzD,QAAO,IAAE,eAAe,eAAe,KAAK,mBAAmB,cAF5C,cAAc,UAAU,CAE6C,YAAY,gBAAgB,KACjH;;AAGL,SAAS,qBAAqB,MAAwC;AACpE,QAAO,KAAK,OAAO,WAAW,IAAI,oBAAoB,CAAC,KAAK,KAAK;;AAGnE,SAAgB,oBAAoB,MAAwC;CAC1E,MAAM,EAAE,wBAAwB,WAAW;CAC3C,MAAM,kBAAkB,OAAO,WAAW,IAAI,kBAAkB,CAAC,KAAK,OAAO;AAC7E,QAAO,IAAE,eAAe,yBAAyB,WAAW,OAAO,KAAK,CAAC,WAAW,gBAAgB,GACjG;;AAGL,SAAS,sCACP,MACA;CACA,MAAM,oBAAoB,CAAC,SAAS;AAIpC,KAH+B,KAAK,OAAO,WAAW,MAAM,MAC1D,uBAAuB,EAAE,CAC1B,CAEC,mBAAkB,KAAK,uBAAuB;AAEhD,QAAO,kBAAkB,KAAK,MAAM;;AAEtC,MAAa,mDACX,MAEA,IAAE;;;;;gBAKY,sCAAsC,EAAE,CAAC;;IAErD,wBAAwB,EAAE,CAAC;;;EAG7B,qBAAqB,EAAE,CAAC;;EAExB,oBAAoB,EAAE,CAAC;EACvB;;;ACxDF,SAAS,6BAA6B,MAAwC;CAC5E,MAAM,oBAAoB,CAAC,eAAe;AAI1C,KAH+B,KAAK,OAAO,WAAW,MAAM,MAC1D,uBAAuB,EAAE,CAC1B,CAEC,mBAAkB,KAAK,kBAAkB;AAE3C,QAAO,kBAAkB,KAAK,MAAM;;AAGtC,SAAS,0BAA0B,QAAgC;AACjE,KAAI,CAAC,OAAO,QAAQ,CAAC,OAAO,OAAQ;AAEpC,QAAO,GADsB,WAAW,OAAO,KAAK,CACrB;;AAGjC,SAAS,wBAAwB,QAAgC;AAC/D,KAAI,CAAC,OAAO,QAAQ,CAAC,OAAO,OAAQ;AAEpC,QAAO,GADsB,WAAW,OAAO,KAAK,CACrB;;AAGjC,SAAS,mBAAmB,QAAgC;AAC1D,KAAI,CAAC,OAAO,QAAQ,CAAC,OAAO,OAAQ;AACpC,QAAO,GAAG,WAAW,OAAO,KAAK,CAAC;;AAGpC,SAAS,6BAA6B,MAAwC;AAC5E,QAAO,KAAK,OAAO,WAChB,IAAI,0BAA0B,CAC9B,OAAO,QAAQ,CACf,KAAK,MAAM;;AAGhB,SAAS,2BAA2B,MAAwC;AAC1E,QAAO,KAAK,OAAO,WAChB,IAAI,wBAAwB,CAC5B,OAAO,QAAQ,CACf,KAAK,MAAM;;AAGhB,SAAS,sBAAsB,MAAwC;AACrE,QAAO,KAAK,OAAO,WAAW,IAAI,mBAAmB,CAAC,KAAK,MAAM;;AAGnE,SAAS,2BAA2B,WAAmC;AACrE,KAAI,CAAC,UAAU,QAAQ,CAAC,UAAU,OAAQ;CAC1C,MAAM,sBAAsB,UAAU,UAAU,KAAK;CACrD,MAAM,yBAAyB,aAAa,UAAU,KAAK;CAC3D,MAAM,iBAAiB,mBAAmB,UAAU;CACpD,MAAM,kBAAkB,0BAA0B,UAAU;CAC5D,MAAM,gBAAgB,wBAAwB,UAAU;CACxD,MAAM,gBAAgB,uBAAuB,UAAU;CAKvD,MAAM,YAAY,CAJG,uBAAuB,UAAU,GAElD,UAAU,cAAc,SACxB,UAAU,gBACc;AAC5B,KAAI,cACF,WAAU,KAAK,iCAAiC;AAIlD,QAAO,IAAE;iBACM,oBAAoB,MAHtB,UAAU,KAAK,KAAK,CAGa;mBAC7B,eAAe;SACzB,uBAAuB;;QAExB,gBAAgB,gBAAgB,YAAY;QAC5C,gBAAgB;S
ACf,UAAU,MAAM;QACjB;;AAGR,SAAS,8BAA8B,WAAmC;AACxE,KAAI,CAAC,UAAU,QAAQ,CAAC,UAAU,OAAQ;CAC1C,MAAM,sBAAsB,UAAU,UAAU,KAAK;CACrD,MAAM,yBAAyB,aAAa,UAAU,KAAK;AAE3D,QAAO,IAAE;kBACO,oBAAoB;mBAFb,mBAAmB,UAAU,CAGpB,KAAK,uBAAuB,KAAK;;AAGnE,SAAS,gCACP,MACA;AAIA,QAHyB,KAAK,OAAO,WAAW,QAAQ,MACtD,SAAS,EAAE,OAAO,CACnB,CACuB,IAAI,2BAA2B,CAAC,KAAK,OAAO;;AAGtE,SAAS,+BACP,MACA;AAIA,QAH4B,KAAK,OAAO,WAAW,QAChD,MAAM,CAAC,SAAS,EAAE,OAAO,CAC3B,CAC0B,IAAI,8BAA8B,CAAC,KAAK,OAAO;;AAG5E,MAAa,qDACX,MAEA,IAAE;;;;;WAKO,6BAA6B,EAAE,CAAC;;EAEzC,6BAA6B,EAAE,CAAC;;;EAGhC,2BAA2B,EAAE,CAAC;;;EAG9B,sBAAsB,EAAE,CAAC;;;EAGzB,gCAAgC,EAAE,CAAC;;EAEnC,+BAA+B,EAAE,CAAC;EAClC;;;AC3HF,SAAS,aAAa,OAAoC;AACxD,KAAI,CAAC,MAAM,KAAM;AAEjB,QAD4B,WAAW,MAAM,KAAK;;AAIpD,SAAS,cAAc,QAAuC;AAC5D,QAAO,OAAO,IAAI,aAAa,CAAC,QAAQ,SAAS,SAAS,KAAA,EAAU;;AAEtE,SAAS,iBAAiB,QAAuC;AAK/D,QAAO,IAAE,2BAJU,cAAc,OAAO,CACrC,KAAK,SAAS,IAAI,KAAK,GAAG,CAC1B,KAAK,OAAO,CAEgC,GAAG;;AAGpD,SAAS,mBAAmB,OAAoC;CAC9D,MAAM,YAAY,aAAa,MAAM;AACrC,KAAI,CAAC,UAAW;AAEhB,QAAO,IAAE;mBACQ,UAAU;qBACR,UAAU;+BACA,UAAU;;;;IAIrC;;AAGJ,SAAS,6BAA6B,QAAuC;AAC3E,QAAO,OACJ,KAAK,UAAU,mBAAmB,MAAM,CAAC,CACzC,OAAO,QAAQ,CACf,KAAK,OAAO;;AAGjB,SAAS,yBAAyB,MAAwC;CACxE,MAAM,SAAS,QAAQ,KAAK,OAAO,aAAa,MAAM,KAAK,GAAG,SAAS,CAAC;AACxE,KAAI,CAAC,OAAO,OAAQ,QAAO;CAE3B,MAAM,qBAAqB,OAAO,QAAQ,KAAK,UAAU;AACvD,MAAI,CAAC,IAAI,MAAM,MAAM,aAAa,EAAE,KAAK,aAAa,MAAM,CAAC,CAC3D,KAAI,KAAK,MAAM;AAEjB,SAAO;IACN,IAAI,OAAoC,CAAC;AAE5C,QAAO,IAAE;MACL,iBAAiB,mBAAmB,CAAC;;;;;;MAMrC,6BAA6B,mBAAmB,CAAC;IACnD;;AAGJ,SAAS,sBAAsB,WAAmC;AAChE,KAAI,CAAC,UAAU,KAAM;CACrB,MAAM,SAAS,UAAU;AACzB,KAAI,OAAO,WAAW,EAAG;AAGzB,QAAO,IAAE;MAFoB,WAAW,UAAU,KAAK,CAG9B,MAFN,cAAc,OAAO,CAAC,OAAO,QAAQ,CAAC,KAAK,MAAM,CAE1B;IACxC;;AAGJ,SAAS,gBAAgB,MAAwC;AAM/D,QAAO,IAAE;4BALmB,KAAK,OAAO,WACrC,IAAI,sBAAsB,CAC1B,OAAO,QAAQ,CACf,KAAK,MAAM,CAGgC;IAC5C;;AAGJ,MAAa,kDACX,MAEA,IAAE;IACA,yBAAyB,EAAE,CAAC;IAC5B,gBAAgB,EAAE,CAAC;EACrB;;;ACvFF,SAAS,mBAAmB,SAAmC;AAC7D,QAAO,QAAQ,IAAI,kBAAkB;;AAGvC,SAAS,qBAAqB,MAAwC;AAEpE,QADwB,mBAAmB,KAAK,OAAO,WAAW,CAC3C,KAAK,MA
AM;;AAGpC,SAAS,2BACP,wBACA,QACA;AAEA,QAAO,GAAG,yBADmB,WAAW,OAAO,KAAK,CACI;;AAG1D,SAAS,4BAA4B,QAAgC;AACnE,KAAI,CAAC,OAAO,KAAM;AAElB,QAAO,GADqB,UAAU,OAAO,KAAK,CACpB;;AAGhC,SAAS,gCACP,QACA,wBACA;AACA,KAAI,CAAC,OAAO,MAAO,QAAO,GAAG,uBAAuB;AAEpD,QAAO,GAAG,yBADkB,WAAW,OAAO,MAAM,CACG;;AAGzD,SAAS,mCACP,MACA;CACA,MAAM,iBAAiB,KAAK,OAAO,WAAW,KAAK,WACjD,gCAAgC,QAAQ,KAAK,uBAAuB,CACrE;AAED,QAAO,MAAM,KAAK,IAAI,IAAI,eAAe,CAAC,CAAC,KAAK,MAAM;;AAGxD,SAAS,2BACP,QACA,wBACA;AAMA,QAAO,IAAE;YAL4B,gCACnC,QACA,uBACD,CAGsC,YAFhB,kBAAkB,OAAO,CAEkB;EAClE;;AAGF,SAAS,4BACP,QACA,wBACA;AAMA,QAAO,IAAE;MALwB,4BAA4B,OAAO,CAMvC,IALG,2BAC9B,QACA,uBACD,CAEwD;IACvD;;AAGJ,SAAS,6BAA6B,MAAwC;AAC5E,QAAO,KAAK,OAAO,WAChB,KAAK,WACJ,4BAA4B,QAAQ,KAAK,uBAAuB,CACjE,CACA,KAAK,IAAI;;AAGd,MAAa,uDACX,MAEA,IAAE;;;;;;;IAOA,qBAAqB,EAAE,CAAC;;;IAGxB,mCAAmC,EAAE,CAAC;;;mBAGvB,2BACf,EAAE,wBACF,EAAE,OACH,CAAC;MACE,6BAA6B,EAAE,CAAC;;EAEpC;;;AC3GF,MAAa,wCACX,MAEA,IAAE;;;;kCAI8B,EAAE,mBAAmB;;;;;;;;;;;;IAYnD,EAAE,mBAAmB;IACrB,EAAE,gBAAgB;IAClB,EAAE,eAAe;IACjB,EAAE,YAAY;;;;wCAIsB,EAAE,gBAAgB;WAC/C,EAAE,mBAAmB;;;uCAGO,EAAE,eAAe;WAC7C,EAAE,kBAAkB;;;oCAGK,EAAE,YAAY;;;;;;;;;oBAS9B,EAAE,gBAAgB;KACjC,EAAE,gBAAgB;;;;;;;;oBAQH,EAAE,eAAe;KAChC,EAAE,eAAe;;;;SAIb,EAAE,eAAe;;;;;0BAKA,EAAE,gBAAgB;yBACnB,EAAE,eAAe;KACrC,EAAE,YAAY;;;;;;;;;eASJ,EAAE,mBAAmB;;;;wBAIZ,EAAE,mBAAmB;;;;uBAItB,EAAE,gBAAgB;sBACnB,EAAE,eAAe;;KAElC,EAAE,mBAAmB;;;;;;;;;;;EAWxB;;;AC1FF,SAAS,4BAA4B,WAAmC;AACtE,KAAI,CAAC,UAAU,KACb,OAAM,IAAI,MAAM,4BAA4B;AAE9C,QAAO,WAAW,UAAU,KAAK;;AAGnC,SAAS,2BAA2B,WAAmC;AACrE,KAAI,CAAC,UAAU,KACb,OAAM,IAAI,MAAM,4BAA4B;AAE9C,QAAO,UAAU,UAAU,KAAK;;AAGlC,SAAS,8BAA8B,WAAmC;AACxE,KAAI,CAAC,UAAU,KACb,OAAM,IAAI,MAAM,4BAA4B;AAE9C,QAAO,aAAa,UAAU,KAAK;;AAGrC,SAAS,yBAAyB,WAAmC;AAEnE,QAAO,GADyB,4BAA4B,UAAU,CACpC;;AAGpC,SAAS,gCAAgC,SAAgC;AAIvE,QAAO,YAH6B,QACjC,SAAS,WAAW,OAAO,WAAW,IAAI,yBAAyB,CAAC,CACpE,KAAK,MAAM,CACiC;;AAGjD,SAAS,2BACP,QACA,uBACA;AAGA,QAAO,YAAY,wBAFU,WAAW,OAAO,KAAK,CAEY,qCADpC,UAAU,OAAO,KAAK,CACuE;;AAG3H,SAAS,6BACP,SACA,uBACA;AACA,QAAO,QACJ,KA
AK,WAAW,2BAA2B,QAAQ,sBAAsB,CAAC,CAC1E,KAAK,KAAK;;AAGf,SAAS,mCAAmC,WAAmC;CAC7E,MAAM,uBAAuB,yBAAyB,UAAU;CAChE,MAAM,4BAA4B,8BAA8B,UAAU;AAC1E,KAAI,UAAU,WAAW,KACvB,QAAO,IAAE;+FACkF,0BAA0B;EACvH;AAEA,QAAO,IAAE,GAAG,qBAAqB,yBAAyB;;AAG5D,SAAS,yBACP,QACA,uBACA;AAEA,QAAO,GAAG,wBADmB,WAAW,OAAO,KAAK,CACG;;AAGzD,SAAS,kBAAkB,WAAmC;AAE5D,QAAO,GADwB,2BAA2B,UAAU,CACnC;;AAGnC,SAAS,wBACP,QACA,WACA,uBACA;AAOA,QAAO,IAAE;IANoB,yBAC3B,QACA,sBACD,CAIsB,GAHD,kBAAkB,UAAU,CAGV;IACtC;;AAGJ,SAAS,iCACP,QACA,uBACA;AACA,QAAO,OAAO,WAAW,KACtB,cACC,IAAE;cACM,8BAA8B,UAAU,CAAC;UAC7C,mCAAmC,UAAU,CAAC;UAC9C,wBAAwB,QAAQ,WAAW,sBAAsB,CAAC;;;QAGpE,IACL;;AAGH,SAAS,mCACP,SACA,uBACA;AACA,QAAO,QACJ,KAAK,WACJ,iCAAiC,QAAQ,sBAAsB,CAAC,KAC9D,KACD,CACF,CACA,KAAK,KAAK;;AAGf,MAAa,uCACX,MAEA,IAAE;;;;;gBAKY,EAAE,YAAY,WAAW,EAAE,kBAAkB;;EAE3D,6BAA6B,EAAE,cAAc,SAAS,EAAE,sBAAsB,CAAC;;EAE/E,gCAAgC,EAAE,cAAc,QAAQ,CAAC;;mCAExB,EAAE,YAAY;;;;;;SAMxC,mCAAmC,EAAE,cAAc,SAAS,EAAE,sBAAsB,CAAC;;;;;;gCAM9D,EAAE,YAAY;EAC5C;;;ACzJF,MAAa,mCAAmC,IAAE;;;;;;;EAOhD;;;ACNF,SAAS,yBACP,gBACA,gBACA;AACA,KAAI,eAAgB,QAAO;AAE3B,QAAO,QAAQ,eAAe;;AAGhC,SAAS,0BACP,gBACA,gBACA;AACA,KAAI,CAAC,eAAgB,QAAO;AAC5B,QAAO;;AAET,MAAa,iCAAiC,MAC5C,IAAE;;;;;;gBAMY,EAAE,eAAe;;IAE7B,EAAE,UAAU,MAAM,EAAE,gBAAgB;IACpC,0BAA0B,EAAE,gBAAgB,EAAE,eAAe,CAAC;;;EAGhE,yBAAyB,EAAE,gBAAgB,EAAE,eAAe,CAAC;;OAExD,EAAE,YAAY;YACT,EAAE,gBAAgB;WACnB,EAAE,eAAe;;OAErB,EAAE,mBAAmB,gBAAgB,EAAE,YAAY;;;;;IAKtD,EAAE,gBAAgB;IAClB,EAAE,eAAe;IACjB,EAAE,YAAY;IACd,EAAE,eAAe;IACjB,EAAE,mBAAmB;;EAEvB;;;AC9CF,MAAa,iCAAiC,MAC5C,IAAE;;;;;;;;;;;;;;;;WAgBO,EAAE,yBAAyB;;IAElC,EAAE,qCAAqC;IACvC,EAAE,kCAAkC;IACpC,EAAE,+BAA+B;IACjC,EAAE,4BAA4B;;gBAElB,EAAE,gBAAgB,IAAI,EAAE,eAAe,IAAI,EAAE,YAAY;;mCAEtC,EAAE,gBAAgB,KAAK,EAAE,mBAAmB;kCAC7C,EAAE,eAAe,KAAK,EAAE,kBAAkB;;yCAEnC,EAAE,YAAY;sBACjC,EAAE,mBAAmB,UAAU;;;;;;;;;;yCAUZ,EAAE,yBAAyB;;;;;;;;;;;;;;iBAcnD,EAAE,4BAA4B;;;iBAG9B,EAAE,kCAAkC;;;iBAGpC,EAAE,+BAA+B;;;iBAGjC,EAAE,qCAAqC;;;EAGtD;;;AClEF,MAAa,kCAAkC,MAC7C,IAAE;;;;;;;;;;;;;IAaA,EAAE,eAAe;IACjB,EAAE,mBAAmB;UACf,E
AAE,kBAAkB;;IAE1B,EAAE,qCAAqC;IACvC,EAAE,+BAA+B;;;oBAGjB,EAAE,uBAAuB;kBAC3B,EAAE,gBAAgB;;;OAG7B,EAAE,mBAAmB,qBAAqB,EAAE,eAAe;;;SAGzD,EAAE,+BAA+B;;;;+BAIX,EAAE,uBAAuB;kBACtC,EAAE,oBAAoB;OACjC,EAAE,mBAAmB,qBAAqB,EAAE,eAAe;;;IAG9D,EAAE,qCAAqC;;;;sBAIrB,EAAE,uBAAuB;kBAC7B,EAAE,2BAA2B;;4CAEH,EAAE,+BAA+B;;;sBAGvD,EAAE,uBAAuB;kBAC7B,EAAE,4BAA4B;;6CAEH,EAAE,+BAA+B;;EAE5E;;;ACtDF,MAAa,6BAA6B,IAAE;;;;;;;;;;;EAW1C;;;ACNF,SAAgB,gCAAgC,EAC9C,aACA,wBACA,WACsC;AAwBtC,QAvBiB,IAAE;;;;;;;kBAOH,YAAY;;;;;;sCAMQ,uBAAuB;iBAC5C,uBAAuB,wBAAwB,YAAY;eAC7D,QAAQ;;;;;;EAQL;;;;ACjClB,MAAa,oCAAoC,IAAE;;;;;;EAMjD;;;ACNF,MAAa,gCAAgC,IAAE;;EAE7C;;;ACDF,MAAa,iCAAiC,MAC5C,IAAE;;;;;;;;;;;;;;;IAeA,EAAE,yBAAyB;IAC3B,EAAE,+BAA+B;IACjC,EAAE,qCAAqC;IACvC,EAAE,4BAA4B;IAC9B,EAAE,kCAAkC;UAC9B,EAAE,kBAAkB;;;YAGlB,EAAE,uBAAuB;4BACT,EAAE,uBAAuB;;;;gDAIL,EAAE,yBAAyB;;;4BAG/C,EAAE,uBAAuB;;;;aAIxC,EAAE,+BAA+B;aACjC,EAAE,4BAA4B;;+CAEI,EAAE,uBAAuB;;;;eAIzD,EAAE,qCAAqC;eACvC,EAAE,+BAA+B;;;;;;;;;;;aAWnC,EAAE,4BAA4B;aAC9B,EAAE,kCAAkC;aACpC,EAAE,+BAA+B;aACjC,EAAE,qCAAqC;;;;;;;;;;;aAWvC,EAAE,4BAA4B;aAC9B,EAAE,kCAAkC;aACpC,EAAE,+BAA+B;aACjC,EAAE,qCAAqC;;;;;;;;;aASvC,EAAE,+BAA+B;aACjC,EAAE,qCAAqC;;;;;;;;;aASvC,EAAE,+BAA+B;aACjC,EAAE,qCAAqC;;;;;;;;;aASvC,EAAE,+BAA+B;aACjC,EAAE,qCAAqC;;;;;;;;;aASvC,EAAE,+BAA+B;;QAEtC,EAAE,qCAAqC;;;;;;EAM7C;;;ACjHF,SAAS,6BAA6B,QAA6B;AAEjE,QAAO,GADsB,WAAW,OAAO,KAAK,CACrB;;AAGjC,SAAS,qCACP,YACA;AACA,QAAO,KACL,YACA,IAAI,KAAK,OAAO,CAAC,EACjB,OAAO,SAAS,EAChB,KAAK,MAAM,UAAU,EAAE,CAAC,CACzB;;AAGH,SAAS,mCACP,YACA;AACA,QAAO,KACL,YACA,IAAI,KAAK,OAAO,CAAC,EACjB,OAAO,SAAS,EAChB,KAAK,MAAM,GAAG,WAAW,EAAE,CAAC,aAAa,CAC1C;;AAGH,SAAgB,yBACd,WACA,gCACA;AACA,KAAI,UAAU,SAAS,KACrB,OAAM,IAAI,MAAM,6BAA6B;CAE/C,MAAM,sBAAsB,UAAU,UAAU,KAAK;CACrD,MAAM,uBAAuB,WAAW,UAAU,KAAK;CACvD,MAAM,yBAAyB,aAAa,UAAU,KAAK;CAC3D,MAAM,wBAAwB,GAAG,qBAAqB;CACtD,MAAM,QAAQ,UAAU;AACxB,QAAO,IAAE;sBACW,oBAAoB;;;cAG5B,sBAAsB;;;;;cAKtB,oBAAoB;;;iBAGjB,+BAA+B;4CACJ,MAAM;4CACN,MAAM;eACnC,uBAAuB;;4CAEM,MAAM;4CACN,MAAM;;IAE9C;;AAGJ,SAAgB,yBAAyB,GAAqC;C
AC5E,MAAM,iBAAiB,qCACrB,EAAE,OAAO,WACV;CACD,MAAM,mBAAmB,mCACvB,EAAE,OAAO,WACV;AAQD,QAPoB;EAClB;EACA;EACA,EAAE;EACF,GAAG;EACH,GAAG;EACJ;;AAIH,SAAgB,sBAAsB,GAAqC;AAEzE,QAAO,IAAE;;MADW,yBAAyB,EAAE,CAAC,KAAK,KAAK,CAG1C;YACN,EAAE,kBAAkB;IAC5B;;AAGJ,SAAS,2BACP,YACA,gCACA;AACA,QAAO,WACJ,KAAK,cACJ,yBAAyB,WAAW,+BAA+B,CACpE,CACA,KAAK,OAAO;;AAEjB,MAAa,iDACX,MAEA,IAAE;;;;;;;;;;;IAWA,EAAE,+BAA+B;IACjC,qCAAqC,EAAE,OAAO,WAAW,CAAC;IAC1D,mCAAmC,EAAE,OAAO,WAAW,CAAC;UAClD,EAAE,kBAAkB;;YAElB,6BAA6B,EAAE,OAAO,CAAC;IAC/C,2BAA2B,EAAE,OAAO,YAAY,EAAE,+BAA+B,CAAC;;;EAGpF;;;AC/HF,MAAa,2BAA2B,MACtC,IAAE;;;;;;;;iBAQa,EAAE,oBAAoB;qBAClB,EAAE,mBAAmB,GAAG;;;;;IAKzC;;;ACfJ,MAAa,6BAA6B,MACxC,IAAE;;gBAEY,EAAE,YAAY,YAAY,EAAE,UAAU,EAAE,WAAW,EAAE,wBAAwB,IAAI,EAAE,UAAU,EAAE;gBAC/F,EAAE,YAAY,YAAY,EAAE,QAAQ,WAAW,EAAE,wBAAwB,IAAI,EAAE,QAAQ;;;+BAGxE,EAAE,UAAU,EAAE;;sBAEvB,EAAE,QAAQ;;;;;;gBAMhB,EAAE,QAAQ;eACX,EAAE,QAAQ;;;;EAIvB;;;ACpBF,MAAa,8BAA8B,EACzC,aACA,6BAEA,IAAE;;;;;;gBAMY,YAAY;;;;oBAIR,uBAAuB;yCACF,YAAY;EACnD;;;ACnBF,SAAgB,gBAAgB,eAAyB;AACvD,KAAI,CAAC,cAAc,OAAQ,QAAO;AAClC,QAAO,cAAc,KAAK,SAAS,IAAI,KAAK,GAAG,CAAC,KAAK,KAAK;;;;ACC5D,MAAa,4BAA4B,MAKvC,IAAE;;;;;;;;WAQO,EAAE,eAAe;;eAEb,EAAE,cAAc;;;uBAGR,EAAE,eAAe;;;;;yBAKf,gBAAgB,EAAE,cAAc,CAAC;;;;;EAKxD;;;AC7BF,MAAa,yBAAyB,IAAE;;;;;;;EAOtC;;;ACPF,MAAa,8BAA8B,MACzC,IAAE;;;;eAIW,EAAE,eAAe;kCACE,EAAE,eAAe;;;;;;;;;;;;;;;;;;;;;;;;EAwBjD;;;AC9BF,MAAa,0BAA0B,IAAE;;;;;;;;EAQvC;;;ACRF,MAAa,4BAA4B,IAAE;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;EAoCzC;;;ACpCF,MAAa,0BAA0B,IAAE;;;;;;EAMvC;;;ACLF,MAAa,+BAA+B,MAK1C,IAAE;;;;;;;;;WASO,EAAE,eAAe;;eAEb,EAAE,cAAc;;sBAET,EAAE,eAAe;;;4DAGqB,EAAE,eAAe;;;;;;;;qBAQxD,gBAAgB,EAAE,cAAc,CAAC;;;;;0BAK5B,EAAE,eAAe;;;;;;;;EAQzC;;;AC3CF,MAAa,4BAA4B,IAAE;;;;;;;EAOzC;;;ACPF,MAAa,uCACX,IAAE;;;;;;;;;;;;;;;;;;;;EAoBF;;;ACrBF,MAAM,0BACJ;AACF,MAAa,iCAAiC,MAC5C,IAAE;;;;;;eAMW,EAAE,eAAe;;;;;;;;;;MAU1B,wBAAwB;;;;;;;;EAQ5B;;;AC3BF,MAAa,mCACX,IAAE;;;;;;;;;EASF;;;ACVF,MAAa,6BAA6B,MAIxC,IAAE;;;;;;eAMW,EAAE,eAAe;YACpB,EAAE,cAAc;;;;;;;EAO1B;;;AClBF,MAAa,gCACX,
IAAE;;;;;EAKF;;;ACNF,MAAa,gCAAgC,MAI3C,IAAE;;;;;;EAMF,EAAE,eAAe;;OAEZ,EAAE,eAAe;;;;;MAKlB,EAAE,cAAc,IAAI,EAAE,eAAe;;;;EAIzC;;;ACrBF,MAAa,mCAAmC,MAI9C,IAAE;;;;;;;;QAQI,EAAE,cAAc;;MAElB,EAAE,eAAe;;;;;;;EAOrB;;;ACrBF,MAAa,aAAa;CACxB,qBAAqB;CACrB,aAAa;CACb,iBAAiB;CACjB,iBAAiB;CACjB,WAAW;CACX,MAAM;CACN,OAAO;CACP,MAAM;CACP;AAED,SAAgB,UAA8B,MAAgB,MAAS;AAMrE,QALa,IAAI,MAAM;EACrB,YAAY;EACZ;EACD,CAAC;;AAKJ,SAAgB,YAAY,MAAgB;CAC1C,MAAM,SAMD,EAAE;CACP,MAAM,OAAO,UAAU,MAAM,WAAW;AAExC,KAAI,uBAAuB,KACzB,QAAO,oBAAoB,KAAK;AAGlC,KAAI,eAAe,KACjB,QAAO,aAAa,KAAK;AAG3B,KAAI,mBAAmB,KACrB,QAAO,aAAa;AAEtB,KAAI,mBAAmB,KACrB,QAAO,cAAc;AAEvB,KAAI,aAAa,KACf,QAAO,QAAQ;AAGjB,QAAO;;;;;;;;AC9CT,MAAa,oCAAoC;CAC/C,iBAAiB;CACjB,+BAA+B;CAC/B,sBAAsB;CACtB,yBAAyB;CAC1B;;;;;;ACgBD,SAAgB,wBAAwB,EACtC,SACA,mBAC8B;CAC9B,MAAM,EAAE,WAAW,sBAAsB,qBACvC,SACA,kBACD;CAGD,MAAM,6BAA6B,KACjC,YAH4B,kBAAkB,SAAS,EAGpB,EAAE,eAAe,MAAM,CAAC,EAC3D,IAAI,uBAAuB,EAC3B,OAAO,UAAU,EACjB,QAAQ,UAAU,cAAc,MAAM,IAAI,gBAAgB,CAAC,EAC3D,OAAO,EACP,KAAK,OAAO,EACZ,KAAK,UAAU,8BAA8B,CAC9C;AAED,KAAI,CAAC,2BACH,OAAM,IAAI,MACR,2DAA2D,gBAAgB,GAC5E;CAGH,MAAM,EAAE,uBAAuB,uBAC7B;AASF,QAPgE;EAC9D;EACA,+BAA+B;EAC/B,sBAAsB;EACtB,yBAAyB,KAAK,mBAAmB,sBAAsB;EACxE;;;;;ACtDH,eAAsB,6BAA6B,YAAwB;AACzE,YAAW,iBAAiB;CAE5B,MAAM,gBAAgB,MAAM,WADT,WAAW,aAAa,CACO;AAClD,YAAW,gBAAgB,cAAc;;AAG3C,eAAsB,WACpB,YACA,SAAkD,cAClD;AACA,KAAI;AAIF,SAHsB,MAAM,OAAO,YAAY,EAC7C,QACD,CAAC;UAEK,OAAO;AACd,UAAQ,MAAM,MAAM;AACpB,SAAO;;;AAIX,eAAsB,cAAc;AAClC,OAAM,WAAW,OAAO;EAAC;EAAY;EAAW;EAAI,CAAC;;;;ACjBvD,MAAM,uBAAuB,EAAE,OAAO;CACpC,MAAM,EAAE,QAAQ;CAChB,IAAI,EAAE,QAAQ;CACd,SAAS,EAAE,QAAQ;CACnB,eAAe,EAAE,MAAM,EAAE,QAAQ,CAAC;CACnC,CAAC;AAEF,SAAgB,kBAAkB,SAAkB,SAAiB;CACnE,MAAM,EAAE,WAAW,cAAc,qBAC/B,SACA,KAAK,WAAW,QAAQ,CACzB;CACD,MAAM,iBAAiB,KACrB,YACC,QAAQ,IAAI,cAAc,YAAY,GACtC,gBAAgB;EACf,SAAS,YAAY,cAAc,CAAC,aAAa;EACjD,IAAI,uBAAuB,YAAY,KAAK;EAC5C,MAAM,uBAAuB,YAAY,OAAO;EAChD,eAAe,+BACb,YACA,gBACD;EACF,IACA,SAAS,qBAAqB,UAAU,KAAK,CAC/C;AAED,KAAI,eAAe,QAAS,QAAO,eAAe;;AAIpD,SAAgB,eAAe,SAAkB,SAAiB;C
AChE,MAAM,iBAAiB,kBAAkB,SAAS,QAAQ;AAC1D,KACE,CAAC,kBACD,aAAa,6BAA6B,eAAe,cAAc,CAEvE,QAAO,KAAA;AAgBT,QAAO,MAAM,gBAdO,KAClB,QAAQ,cAAc,KAAK,WAAW,SAAS,YAAY,CAAC,GAC3D,gBAAgB;EACf,sBAAsB,wBACpB,YACA,wBACA,KACD;EACD,sBAAsB,+BACpB,YACA,uBACD;EACF,EACF,CACwC;;;;AC3C3C,SAAgB,qBAAqB,SAAkB,SAAiB;CACtE,MAAM,EAAE,WAAW,kBAAkB,qBACnC,SACA,aACD;CACD,MAAM,EAAE,WAAW,iBAAiB,qBAClC,SACA,KAAK,cAAc,QAAQ,CAC5B;CAED,MAAM,wBAAwB,kBAC5B,cAAc,cAAc,aAAa,CAC1C;CAED,MAAM,4BAA4B,kBAChC,cAAc,cAAc,iBAAiB,CAC9C;AAED,QAAO,KAAK,UAAU,aAAa;EACjC,eAAe;EAGf,eAAe,YACb,SACA,CAAC,MAAM,aAAa,sBAAsB,CAAC,EAAE,SAAS,CAAC,cAAc,CAAC,CAAC,EACvE,CAAC,MAAM,aAAa,0BAA0B,CAAC,EAAE,SAAS,CAAC,UAAU,CAAC,CAAC,EACvE,SAAS,CAAC,eAAe,UAAU,CAAC,CACrC;EACD,eAAe,KAEb,aAAa,cAAc,eAAe,IACxC,aAAa,cAAc,WAAW,EACxC,mBAEA,YACE,EACG,eAAe,aAAa,yBAAyB,WAAW,EACjE,SAAS,eAAe,CACzB,EACD,EACG,eAAe,aAAa,mBAAmB,WAAW,EAC3D,SAAS,YAAY,CACtB,EACD,SAAS,YAAY,CACtB,CACF;EACD,eAAe,+BACb,aAAa,cAAc,aAAa,EACxC,gBACD;EACF,EAAE;;AAGL,MAAM,qBAAqB,eACzB,KACE,YACA,8BACA,OAAO,WAAW,cAAc,CAAC,EACjC,IAAI,MAAM,IAAI,CAAC,EACf,KAAK,MAAM,EAAE,GAAG,EAAE,CAAC,EACnB,OAAO,SAAS,CACjB;;;AC7EH,SAAgB,oBAAoB,SAAkB,SAAiB;CACrE,MAAM,EAAE,WAAW,gBAAgB,qBACjC,SACA,KAAK,aAAa,QAAQ,CAC3B;AAED,QAAO,KACL,YAAY,cAAc,WAAW,EAAE,YAAY,IAAI,EAAE,EACzD,MACG,qBACC,iBAAiB,cAAc,EAAE,SAAS,CAAC,SAAS,eAAe,IACnE,MACH,GACA,qBACC,kBACI,oBAAoB,OAAO,EAC3B,OAAO,WAAW,oBAAoB,EACtC,qBAAqB,WAAW,cAAc,EAC9C,iBAAiB,GACtB,kBAAkB,EAAE,cAAc,EACpC;;;;;;;ACnBH,SAAgB,sBAAsB,SAAkB,UAAkB;CACxE,MAAM,UAAU,KAAK,QAAQ,SAAS;AACtC,KAAI,CAAC,QAAQ,aAAa,QAAQ,CAChC,SAAQ,gBAAgB,QAAQ;CAElC,MAAM,aAAa,QAAQ,cAAc,SAAS;AAClD,KAAI,CAAC,WAIH,QAAO;EACL,eAAe;EACf,YALoB,QAAQ,iBAAiB,UAAU,IAAI,EAC3D,WAAW,MACZ,CAAC;EAID;AAEH,QAAO;EACL,eAAe;EACf;EACD;;;;;AAMH,SAAgB,qBAAqB,SAAkB,SAAiB;CACtE,MAAM,YAAY,QAAQ,aAAa,QAAQ;AAC/C,KAAI,CAAC,UAEH,QAAO;EACL,eAAe;EACf,WAHmB,QAAQ,gBAAgB,QAAQ;EAIpD;AAEH,QAAO;EACL,eAAe;EACf;EACD;;;;;AAMH,eAAsB,uBACpB,SACA,GAAG,eACH;AACA,MAAK,MAAM,WAAW,cAEpB,KAAI,CADQ,QAAQ,aAAa,QAAQ,CAGvC,OADe,QAAQ,gBAAgB,QAAQ,CAClC,MAAM;;AAKzB,SAAgB,6B
AA6B,MAI1C;CACD,MAAM,EAAE,SAAS,SAAS,aAAa;CACvC,MAAM,kBAAkB,UAAU;AAClC,KAAI,kBAAkB,EAAG;CACzB,MAAM,0BAA0B,SAAS,QACvC,KAAK,QAAQ,IACb,KAAK,gBAAgB,GACtB;AAID,QAF4B,QAAQ,cAAc,wBAAwB;;;;;;;ACpE5E,SAAgB,mBACd,aACA,YACA;CACA,MAAM,sBAA+C,EAAE;AACvD,MAAK,MAAM,CAAC,KAAK,UAAU,OAAO,QAAQ,YAAY,EAAE;EACtD,MAAM,qBAAqB,wBAAwB,KAAK,MAAM;AAC9D,sBAAoB,KAAK,mBAAmB;;CAE9C,MAAM,gBAAgB,GAAG,QAAQ,8BAC/B,qBACA,KACD;AAGD,QADkB,iBAAiB,WAAW,CAC7B,cAAc;;AAGjC,SAAS,aAAa;AACpB,QAAO,GAAG,QAAQ,aAAa;;AAGjC,SAAS,YAAY;AACnB,QAAO,GAAG,QAAQ,YAAY;;AAGhC,SAAS,aAAa,OAAgB;AACpC,QAAO,QAAQ,WAAW,GAAG,YAAY;;AAG3C,SAAS,YAAY;AACnB,QAAO,GAAG,QAAQ,YAAY;;AAGhC,SAAS,iBAAiB;AACxB,QAAO,GAAG,QAAQ,iBAAiB,YAAY;;AAGjD,SAAS,oBAAoB,OAAe;AAC1C,QAAO,GAAG,QAAQ,qBAAqB,MAAM;;AAG/C,SAAgB,mBAAmB,OAAe;AAChD,QAAO,GAAG,QAAQ,oBAAoB,MAAM;;AAG9C,SAAS,kBAAkB,UAA2B;AACpD,QAAO,GAAG,QAAQ,6BAA6B,UAAU,KAAK;;AAGhE,SAAS,kBAAkB,OAA+B;AACxD,KAAI,UAAU,KAAM,QAAO,WAAW;AACtC,KAAI,UAAU,KAAA,EAAW,QAAO,gBAAgB;AAChD,KAAI,OAAO,UAAU,UAAW,QAAO,aAAa,MAAM;AAC1D,KAAI,OAAO,UAAU,SAAU,QAAO,mBAAmB,MAAM;AAC/D,KAAI,OAAO,UAAU,SAAU,QAAO,oBAAoB,MAAM;AAEhE,KAAI,MAAM,QAAQ,MAAM,CAEtB,QAAO,kBADU,MAAM,KAAK,SAAS,kBAAkB,KAAK,CAAC,CAC3B;AAGpC,KAAI,OAAO,UAAU,SACnB,QAAO,GAAG,QAAQ,8BAChB,OAAO,QAAQ,MAAM,CAAC,KAAK,CAAC,KAAK,OAAO;EACtC,MAAM,OAAO,GAAG,QAAQ,iBAAiB,IAAI;AAC7C,SAAO,GAAG,QAAQ,yBAAyB,MAAM,kBAAkB,EAAE,CAAC;GACtE,EACF,KACD;AAGH,OAAM,IAAI,MAAM,0BAA0B,MAAM;;AAGlD,SAAS,wBAAwB,MAAc,OAAgB;CAC7D,MAAM,iBAAiB,GAAG,QAAQ,iBAAiB,KAAK;CACxD,MAAM,kBAAkB,kBAAkB,MAAM;AAOhD,QAL2B,GAAG,QAAQ,yBACpC,gBACA,gBACD;;AAKH,SAAS,iBAAiB,YAAwB;CAChD,MAAM,UAAU,GAAG,cAAc,EAAE,SAAS,GAAG,YAAY,UAAU,CAAC;AACtE,SAAQ,SACN,QAAQ,UAAU,GAAG,SAAS,aAAa,MAAM,WAAW,aAAa;;;;;;;AChE7E,SAAgB,iBAAiB,WAA0C;AACzE,QAAO,WACH,iBAAiB,CAClB,GAAG,EAAE,EACJ,qBAAqB,WAAW,wBAAwB;;;AAI9D,SAAgB,kBACd,QACA,cACA,cACA;AACA,QAAO,QACH,YAAY,aAAa,EACzB,OAAO,WAAW,mBAAmB,EACrC,aAAa,CACd,MAAM,UAAU,MAAM,SAAS,KAAK,aAAa,EAChD,cAAc,aAAa;;AAGjC,SAAgB,iCACd,YACA,UACA;AASA,QARoB,WAAW,wBAAwB,gBAAgB;AAIrE,OADuB,YAAY,aAAa,EAAE,SAAS,IAAI,IAC5C,SAAS,SAAS,CAAE,
QAAO;AAE9C,SAAO,YAAY,SAAS,CAAC,SAAS,CAAC,SAAS,SAAS;GACzD;;AAIJ,SAAgB,2BACd,YACA,cACA;AACA,KAAI,CAAC,UAAU,WAAW,CAAE,QAAO,KAAA;AAEnC,QAAO,KACL,WAAW,qBAAqB,WAAW,mBAAmB,GAC7D,eAAe,cAAc,WAAW,SAAS,EAAE,aAAa,CAClE;;AAGH,SAAgB,uBACd,YACA,cACA;AACA,QAAO,2BAA2B,YAAY,aAAa,EACvD,yBAAyB,WAAW,cAAc,EAClD,iBAAiB;;AAGvB,SAAgB,+BACd,YACA,cACA;AACA,QAAO,KACL,2BAA2B,YAAY,aAAa,EAChD,yBAAyB,WAAW,uBAAuB,EAC3D,aAAa,IAAI,EAAE,EACvB,KAAK,YACH,QAAQ,OAAO,WAAW,cAAc,EAAE,iBAAiB,CAC5D,EACD,OAAO,SAAS,CACjB;;AAGH,SAAgB,wBACd,YACA,cACA,UACA;AACA,QAAO,KACL,2BAA2B,YAAY,aAAa,EAAE,gBAAgB,IACpE,EAAE,EACJ,KAAK,eAAe,WAAW,SAAS,CAAC,EACzC,YACE,EAAE,UAAU,aAAa,WAAW,aAAa,MAAM,EAAE,SAAS,KAAK,CAAC,EACxE,EACG,UAAU,aAAa,WAAW,cAAc,MAAM,EACvD,SAAS,MAAM,CAChB,EACD,SAAS,SAAS,CACnB,CACF;;AAGH,SAAgB,uBACd,QACsC;AACtC,KAAI,CAAC,YAAY,OAAO,CAAE,QAAO,KAAA;CAEjC,MAAM,cAAc,KAClB,SACC,QAAQ,KAAK,KAAK,IAAI,YAAY,GAAG,IAAI,KAAK,GAAG,IAAI,KAAK,OAAO,EAClE,KAAK,gBAAgB,iBAAiB,GACrC,cAAc,gCAAgC,CAAC,UAAU,UAAU,CACrE;AAED,KAAI,CAAC,YAAY,SAAS;AACxB,UAAQ,MAAM,YAAY,MAAM;AAChC;;AAGF,QAAO,YAAY;;AAGrB,SAAgB,kBAAkB,YAAoC;AACpE,QAAO,KACL,YAAY,uBAAuB,IAAI,EAAE,EACzC,SAAS,sBAAsB,kBAAkB,iBAAiB,CAAC,EACnE,KAAK,oBAAoB,gBAAgB,SAAS,CAAC,CACpD;;AAGH,SAAgB,6BACd,YACA;AACA,QAAO,KACL,YAAY,uBAAuB,IAAI,EAAE,EACzC,SAAS,sBACP,kBAAkB,oBAAoB,CAAC,iBAAiB,CACzD,CACF;;;;AC7JH,MAAa,0BAA0B;CAErC,6BAA6B;CAE7B,qBAAqB;CAErB,sBAAsB;EACpB,mBAAmB;EACnB,iBAAiB,gBAAgB;EACjC,kDAAkD;EACnD;CACF;;AAGD,SAAgB,yBAAyB,kBAA0B;AACjE,QAAO;EACL,GAAG;EACH;EACD;;;AAIH,SAAgB,oBAAoB,YAAoB;AAEtD,SAAQ,MAAM,WAAW;AAQzB,QANgB,IAAI,QAAQ;EAC1B,kBAFuB,KAAK,KAAK,YAAY,gBAAgB;EAK7D,8BAA8B;EAC/B,CAAC;;;;AC/BJ,SAAgB,iBAAiB,YAAyC;CACxE,MAAM,EAAE,QAAQ,UAAU;CAC1B,MAAM,SAAS;EAAE;EAAQ;EAAO;AAEhC,QAAO,QAAQ,OAAO,CAAC,SAAS,CAAC,OAAO,WAAW;AACjD,MAAI,CAAC,mBAAmB,MAAM,OAAO,IAAI,MAAM,iBAAiB,GAC9D,OAAM,IAAI,MACR,GACE,MAAM,OAAO,EAAE,CAAC,mBAAmB,GAAG,MAAM,MAAM,EAAE,CACrD,8DACF;GAEH;AAEF,QAAO;EACL,oBAAoB,iBAAiB,OAAO,aAAa;EACzD,mBAAmB,iBAAiB,MAAM,aAAa;EACxD;;AAGH,SAAS,mBAAmB,QAA2B;AACrD,QAAO,WAAW,MAAM,CAAC,OAAO,SAAS,IAAI
;;AAG/C,SAAS,iBAAiB,OAAe;AACvC,QAAO,UAAU,KAAK,OAAO;;;;;;;;;;ACV/B,SAAgB,2BACd,oBACoC;CACpC,MAAM,SAAmB,EAAE;AAG3B,KACE,CAAC,mBAAmB,MACpB,OAAO,mBAAmB,OAAO,YACjC,mBAAmB,GAAG,MAAM,KAAK,GAEjC,QAAO,KAAK,6DAA2D;AAGzE,KACE,CAAC,mBAAmB,QACpB,OAAO,mBAAmB,SAAS,YACnC,mBAAmB,KAAK,MAAM,KAAK,GAEnC,QAAO,KAAK,+DAA6D;AAI3E,KAAI,OAAO,mBAAmB,cAAc,SAC1C,QAAO,KAAK,0CAAwC;AAItD,KACE,CAAC,MAAM,QAAQ,mBAAmB,eAAe,IACjD,mBAAmB,eAAe,WAAW,GAC7C;AACA,SAAO,KACL,wEACD;AACD,SAAO;GAAE,SAAS;GAAO;GAAQ;;CAInC,MAAM,aACJ,mBAAmB,eACjB,mBAAmB,eAAe,SAAS;AAG/C,KAAI,CAAC,YAAY;AACf,SAAO,KAAK,6CAA6C;AACzD,SAAO;GAAE,SAAS;GAAO;GAAQ;;AAInC,KAAI,CAAC,WAAW,OAAO;AACrB,SAAO,KAAK,sDAAoD;AAChE,SAAO;GAAE,SAAS;GAAO;GAAQ;;AAInC,KAAI,CAAC,WAAW,MAAM,OACpB,QAAO,KAAK,6DAA2D;MAClE;EACL,MAAM,cAAc,WAAW,MAAM;AAErC,MAAI,OAAO,YAAY,WAAW,SAChC,QAAO,KAAK,2CAAyC;AAGvD,MAAI,OAAO,YAAY,iBAAiB,SACtC,QAAO,KAAK,iDAA+C;AAQ7D,MAHE,YAAY,UACZ,YAAY,OAAO,MAAM,KAAK,MAC9B,YAAY,OAAO,SAAS,IAAI,KAG/B,CAAC,YAAY,gBAAgB,YAAY,aAAa,MAAM,KAAK,IAElE,QAAO,KACL,oEACD;;AAKL,KAAI,CAAC,WAAW,MAAM,MACpB,QAAO,KAAK,4DAA0D;MACjE;EACL,MAAM,aAAa,WAAW,MAAM;AAEpC,MAAI,OAAO,WAAW,WAAW,SAC/B,QAAO,KAAK,0CAAwC;AAGtD,MAAI,OAAO,WAAW,iBAAiB,SACrC,QAAO,KAAK,gDAA8C;AAQ5D,MAHE,WAAW,UACX,WAAW,OAAO,MAAM,KAAK,MAC7B,WAAW,OAAO,SAAS,IAAI,KAG9B,CAAC,WAAW,gBAAgB,WAAW,aAAa,MAAM,KAAK,IAEhE,QAAO,KACL,mEACD;;AAKL,KAAI,CAAC,MAAM,QAAQ,WAAW,QAAQ,CACpC,QAAO,KAAK,qDAAmD;MAC1D;AAEL,MAAI,WAAW,QAAQ,WAAW,EAChC,QAAO,KAAK,6DAA6D;AAG3E,aAAW,QAAQ,SAAS,QAAQ,gBAAgB;AAClD,OACE,CAAC,OAAO,QACR,OAAO,OAAO,SAAS,YACvB,OAAO,KAAK,MAAM,KAAK,GAEvB,QAAO,KACL,mBAAmB,YAAY,wCAChC;AAGH,OAAI,CAAC,MAAM,QAAQ,OAAO,WAAW,CACnC,QAAO,KACL,WAAW,OAAO,QAAQ,YAAY,cAAc,mCACrD;QACI;AAEL,QAAI,OAAO,WAAW,WAAW,EAC/B,QAAO,KACL,WAAW,OAAO,QAAQ,YAAY,cAAc,4CACrD;AAGH,WAAO,WAAW,SAAS,WAAW,mBAAmB;KACvD,MAAM,cAAc,UAAU,QAAQ,YAAY;KAClD,MAAM,WAAW,OAAO,QAAQ,YAAY;AAG5C,SACE,CAAC,UAAU,QACX,OAAO,UAAU,SAAS,YAC1B,UAAU,KAAK,MAAM,KAAK,GAE1B,QAAO,KACL,aAAa,YAAY,cAAc,SAAS,yCACjD;AAIH,SACE,UAAU,WAAW,QACrB,OAAO,UAAU,WAAW,SAE5B,QAAO,KACL,cAAc,YAAY,eAAe,SAAS,wDACnD;AAIH,S
ACE,UAAU,UAAU,KAAA,KACpB,OAAO,UAAU,UAAU,SAE3B,QAAO,KACL,cAAc,YAAY,eAAe,SAAS,oDACnD;AAIH,SAAI,CAAC,MAAM,QAAQ,UAAU,OAAO,CAClC,QAAO,KACL,cAAc,YAAY,eAAe,SAAS,+BACnD;MAEH;;IAEJ;;AAGJ,QAAO;EACL,SAAS,OAAO,WAAW;EAC3B;EACD;;;;;AChMH,SAAgB,qBAAqB,EACnC,SACA,eACA,YACA,iBACA,UACA,+BAC2B;AAC3B,KAAI,mBAAmB,CAAC,CAAC,4BACvB,OAAM,IAAI,MACR,sEACD;CAGH,MAAM,EAAE,eAAe,sBAAsB,SAD5B,KAAK,KAAK,eAAe,YAAY,CACS;AAE/D,YAAW,gBAAgB,GAAG;CAO9B,MAAM,WAAW,iCAAiC;EAChD;EACA;EACA,sBAR2B,WAAW,WAAW;EASjD,eARoB,kBAClB,KAAK,gBAAgB,MACrB,KAAK,UAAU,4BAA4B;EAO9C,CAAC;AACF,YAAW,gBAAgB,SAAS;;AAGtC,eAAsB,gBAAgB,MAGnC;CACD,MAAM,EAAE,SAAS,mBAAmB;CACpC,MAAM,aAAa,QAAQ,iBACzB,KAAK,KAAK,gBAAgB,aAAa,EACvC,iBACA,EAAE,WAAW,MAAM,CACpB;CAED,MAAM,eAAe,WAClB,8BAA8B,UAAU,CACxC,gCAAgC,WAAW,uBAAuB;AAErE,MACE,QAAQ,oBAAoB,eAAe,CAAC,0BAA0B,EACtE,QAAQ,eAAe,WAAW,aAAa,KAAK,YAAY,EAChE,UAAU,eAAe,WAAW,aAAa,CAAC,EAClD,KAAK,eACH,iCAAiC,YAAY,eAAe,CAC7D,EACD,OAAO,SAAS,EAChB,KAAK,yBAAyB;EAC5B,MAAM,oBAAoB,SAAS;EACnC,WAAW,oBACR,eAAe,CACf,cAAc,CACd,aAAa;EACjB,EAAE,EACH,KAAK,EAAE,MAAM,iBAAiB;EAC5B;EACA,cAAc,CAAC,KAAK;EACpB,iBAAiB,KAAK,KAAK,KAAK,WAAW,YAAY;EACxD,EAAE,EACH,SAAS,EAAE,MAAM,cAAc,sBAAsB;AACnD,aAAW,qBAAqB;GAC9B;GACA;GACD,CAAC;AACF,eAAa,WAAW,KAAK;GAC7B,CACH;AAED,OAAM,6BAA6B,WAAW;;AAGhD,eAAsB,qBAAqB,MAGxC;CACD,MAAM,EAAE,SAAS,mBAAmB;CACpC,MAAM,aAAa,QAAQ,iBACzB,KAAK,KAAK,gBAAgB,WAAW,EACrC,IACA,EAAE,WAAW,MAAM,CACpB;AAED,MACE,QAAQ,oBAAoB,eAAe,CAAC,0BAA0B,EACtE,QAAQ,eAAe,WAAW,aAAa,KAAK,YAAY,EAChE,UAAU,eAAe,WAAW,aAAa,CAAC,EAClD,KAAK,eACH,iCAAiC,YAAY,eAAe,CAC7D,EACD,OAAO,SAAS,EAChB,KAAK,yBAAyB;EAC5B,MAAM,oBAAoB,SAAS;EACnC,WAAW,oBACR,eAAe,CACf,cAAc,CACd,aAAa;EACjB,EAAE,EACH,KAAK,EAAE,MAAM,iBAAiB;EAC5B,cAAc,CAAC,KAAK;EACpB,iBAAiB,KAAK,KAAK,KAAK,WAAW,YAAY;EACxD,EAAE,EACH,SAAS,EAAE,cAAc,sBAAsB;AAC7C,aAAW,qBAAqB;GAC9B;GACA;GACD,CAAC;GACF,CACH;AAED,OAAM,6BAA6B,WAAW;;;;;ACxGhD,eAAsB,mBAAmB,EACvC,SACA,WACA,YACA,UACA,yBACA,wBACkB;CAClB,MAAM,EAAE,WAAW,eAAe,qBAAqB,SAAS,UAAU;CAC1E,MAAM,iBAAiB,WAAW,SAAS;CAC3C,MAAM,EAAE,WAAW,sBAAsB,qBACvC,SACA,kBACD;CACD,
MAAM,wBAAwB,kBAAkB,SAAS;CACzD,MAAM,gBAAgB,KAAK,KAAK,gBAAgB,UAAU;CAC1D,MAAM,aAAa,WAAW,kBAAkB,CAAC,SAAS;CAC1D,MAAM,0BAA0B,KAAK,KAAK,eAAe,aAAa;AAEtE,OAAM,uBACJ,SACA,uBACA,gBACA,eACA,wBACD;AAED,OAAM,8BAA8B;EAClC;EACA;EACD,CAAC;AAEF,OAAM,uBAAuB;EAC3B;EACA;EACD,CAAC;AAEF,OAAM,mBAAmB;EACvB;EACA;EACD,CAAC;AAEF,OAAM,gBAAgB;EACpB;EACA;EACD,CAAC;AAEF,OAAM,mBAAmB;EACvB;EACA;EACD,CAAC;AAEF,OAAM,cAAc;EAClB;EACA;EACD,CAAC;AAEF,OAAM,sBAAsB;EAC1B;EACA;EACD,CAAC;AAEF,OAAM,sBAAsB;EAC1B;EACA;EACD,CAAC;AAEF,OAAM,iBAAiB;EACrB;EACA;EACD,CAAC;AAEF,OAAM,kBAAkB;EACtB;EACA;EACA;EACA;EACD,CAAC;AAEF,sBAAqB;EACnB;EACA;EACA;EACA;EACA,iBAAiB;EAClB,CAAC;AAEF,OAAM,gBAAgB;EAAE;EAAS;EAAgB,CAAC;AAClD,OAAM,qBAAqB;EAAE;EAAS;EAAgB,CAAC;AACvD,OAAM,uBACJ,EACE,MAAM,CACJ;EACE,MAAM;EACN,IAAI;EACJ,eAAe,CAAC,+BAA+B;EAChD,CACF,EACF,EACD,WACD;;AAOH,eAAe,iBAAiB,EAC9B,SACA,iBACuB;CAEvB,MAAM,EAAE,eAAe,eAAe,sBACpC,SAFe,KAAK,KAAK,eAAe,aAAa,CAItD;AAED,KAAI,eAAe;EACjB,MAAM,iBAAiB,WAAW,YAAY,SAAS;AACvD,MAAI,gBAAgB;AAClB,OAAI,CAAC,eAAe,iBAAiB,CACnC,gBAAe,mBAAmB,KAAK;AAEzC;;;CAGJ,MAAM,WAAW,uBAAuB;AACxC,YAAW,gBAAgB,SAAS;AACpC,OAAM,6BAA6B,WAAW;;AAShD,eAAe,kBAAkB,EAC/B,SACA,eACA,yBACA,wBACwB;CAExB,MAAM,EAAE,eAAe,sBAAsB,SAD5B,KAAK,KAAK,eAAe,YAAY,CACS;CAC/D,MAAM,6BAA6B,KAAK,UAAU,wBAAwB;CAG1E,MAAM,WAAW,sBAAsB;EACrC,4BAHiC,uBAAuB,SAAS;EAIjE;EACD,CAAC;AACF,YAAW,gBAAgB,SAAS;AACpC,OAAM,6BAA6B,WAAW;;AAOhD,eAAe,sBAAsB,EACnC,SACA,2BAC4B;CAE5B,MAAM,EAAE,eAAe,eAAe,sBACpC,SAFe,KAAK,KAAK,yBAAyB,oBAAoB,CAIvE;AAED,KAAI,cAAe;CAEnB,MAAM,WAAW,8BAA8B;AAC/C,YAAW,gBAAgB,SAAS;AACpC,OAAM,6BAA6B,WAAW;;AAQhD,eAAe,8BAA8B,EAC3C,SACA,2BACoC;CAKpC,MAAM,EAAE,eAAe,eAAe,sBACpC,SALe,KAAK,KACpB,yBACA,4BACD,CAIA;AAED,KAAI,cAAe;AAEnB,YAAW,gBAAgB,gDAAgD,CAAC;AAC5E,OAAM,6BAA6B,WAAW;;AAOhD,eAAe,gBAAgB,EAC7B,SACA,2BACsB;CAEtB,MAAM,EAAE,eAAe,eAAe,sBACpC,SAFsB,KAAK,KAAK,yBAAyB,cAAc,CAIxE;AAED,KAAI,cAAe;CAEnB,MAAM,WAAW,wBAAwB;AACzC,YAAW,gBAAgB,SAAS;AACpC,OAAM,6BAA6B,WAAW;;AAOhD,eAAe,cAAc,EAC3B,SACA,2BACoB;CAEpB,MAAM,EAAE,eAAe,eAAe,sBACpC,SAFoB,KAAK,KAAK,yBAAyB,YAAY,CAIpE
;AAED,KAAI,cAAe;CAEnB,MAAM,WAAW,sBAAsB;AACvC,YAAW,gBAAgB,SAAS;AACpC,OAAM,6BAA6B,WAAW;;AAOhD,eAAe,sBAAsB,EACnC,SACA,2BAC4B;CAE5B,MAAM,EAAE,eAAe,eAAe,sBACpC,SAFe,KAAK,KAAK,yBAAyB,oBAAoB,CAIvE;AAED,KAAI,cAAe;AAEnB,YAAW,gBAAgB,0BAA0B;AACrD,OAAM,6BAA6B,WAAW;;AAOhD,eAAe,mBAAmB,EAChC,SACA,2BACyB;CAEzB,MAAM,EAAE,eAAe,eAAe,sBACpC,SAFe,KAAK,KAAK,yBAAyB,iBAAiB,CAIpE;AAED,KAAI,cAAe;AAEnB,YAAW,gBAAgB,uBAAuB;AAClD,OAAM,6BAA6B,WAAW;;AAOhD,eAAe,mBAAmB,EAChC,SACA,2BACyB;CAEzB,MAAM,EAAE,eAAe,eAAe,sBACpC,SAFe,KAAK,KAAK,yBAAyB,iBAAiB,CAIpE;AAED,KAAI,cAAe;AAEnB,YAAW,gBAAgB,uBAAuB;AAClD,OAAM,6BAA6B,WAAW;;AAOhD,eAAe,uBAAuB,EACpC,SACA,2BAC6B;CAE7B,MAAM,EAAE,eAAe,eAAe,sBACpC,SAFe,KAAK,KAAK,yBAAyB,qBAAqB,CAIxE;AAED,KAAI,cAAe;AAEnB,YAAW,gBAAgB,2BAA2B;AACtD,OAAM,6BAA6B,WAAW;;;;ACxThD,eAAsB,+BAA+B,YAAoB;AACvE,OAAM,qBACJ,KAAK,YAAY,gBAAgB,EACjC,MAAM,WAAW,kBAAkB,OAAO,CAC3C;AACD,OAAM,qBACJ,KAAK,YAAY,aAAa,EAC9B,MAAM,WAAW,mBAAmB,OAAO,CAC5C;AACD,OAAM,qBACJ,KAAK,YAAY,WAAW,EAC5B,MAAM,WAAW,gBAAgB,CAClC;AACD,OAAM,qBACJ,KAAK,YAAY,mBAAmB,EACpC,MAAM,WAAW,qBAAqB,CACvC;AACD,OAAM,qBACJ,KAAK,YAAY,WAAW,EAC5B,MAAM,WAAW,gBAAgB,CAClC;AACD,OAAM,qBACJ,KAAK,YAAY,YAAY,EAC7B,MAAM,WAAW,eAAe,MAAM,CACvC;AACD,OAAM,qBACJ,KAAK,YAAY,mBAAmB,EACpC,MAAM,WAAW,qBAAqB,CACvC;;AAGH,eAAsB,kCAAkC,YAAoB;AAC1E,OAAM,qBACJ,KAAK,YAAY,qCAAqC,EACtD,MAAM,WAAW,uBAAuB,CACzC;AACD,OAAM,qBACJ,KAAK,YAAY,2BAA2B,EAC5C,MAAM,WAAW,4BAA4B,CAC9C;AACD,OAAM,qBACJ,KAAK,YAAY,uCAAuC,EACxD,MAAM,WAAW,yBAAyB,CAC3C;;AAGH,eAAsB,2BAA2B,YAAoB;AACnE,OAAM,qBACJ,KAAK,YAAY,qBAAqB,EACtC,MAAM,WAAW,gBAAgB,CAClC;AACD,OAAM,qBACJ,KAAK,YAAY,mBAAmB,EACpC,MAAM,WAAW,qBAAqB,CACvC;;AAGH,eAAsB,8BAA8B,YAAoB;AACtE,OAAM,qBACJ,KAAK,YAAY,wBAAwB,EACzC,MAAM,WAAW,0BAA0B,CAC5C;AACD,OAAM,qBACJ,KAAK,YAAY,sBAAsB,EACvC,MAAM,WAAW,wBAAwB,CAC1C;AACD,OAAM,qBACJ,KAAK,YAAY,wBAAwB,EACzC,MAAM,WAAW,wBAAwB,CAC1C;AACD,OAAM,qBACJ,KAAK,YAAY,4BAA4B,EAC7C,MAAM,WAAW,wBAAwB,CAC1C;AACD,OAAM,qBACJ,KAAK,YAAY,sBAAsB,EACvC,MAAM,WAAW,wBAAwB,CAC1C;;AAGH,eAAsB,6BAA6B,YAAoB;AACrE,OAAM,qBACJ,KAAK,YAAY,qBAAqB,EACtC,MAAM,WAAW,u
BAAuB,CACzC;;AAGH,eAAsB,iBAAiB,aAAa,QAAQ,KAAK,EAAE;AACjE,OAAM,kCAAkC,WAAW;AACnD,OAAM,2BAA2B,WAAW;AAC5C,OAAM,8BAA8B,WAAW;AAC/C,OAAM,6BAA6B,WAAW;;AAGhD,eAAsB,mBAAmB,aAAa,QAAQ,KAAK,EAAE;AACnE,OAAM,qBAAqB,KAAK,YAAY,YAAY,EAAEE,eAAe;AACzE,OAAM,qBAAqB,KAAK,YAAY,YAAY,EAAE,eAAe;AACzE,OAAM,qBAAqB,KAAK,YAAY,YAAY,EAAE,YAAY;AACtE,OAAM,qBACJ,KAAK,YAAY,wBAAwB,EACzC,uBACD;AACD,OAAM,qBACJ,KAAK,YAAY,mBAAmB,EACpC,kBACD;AACD,OAAM,qBACJ,KAAK,YAAY,8BAA8B,EAC/C,4BACD;;AAGH,eAAsB,sBACpB,MAMA,aAAa,QAAQ,KAAK,EAC1B;CACA,MAAM,EAAE,MAAM,KAAK,SAAS,gBAAgB;AAC5C,OAAM,qBAAqB,WAAW,gBAAgB;AACtD,OAAM,qBAAqB,aAAa,eAAe;AACvD,OAAM,qBAAqB,UAAU,cAAc;CACnD,MAAM,cAAc,MAAM,4BAA4B;EACpD;EACA;EACA;EACD,CAAC;AACF,OAAM,uBAAuB,EAAE,MAAM,EAAE,WAAW;AAMlD,OAAM,qBAAqB,0BALF,MAAM,8BAA8B;EAC3D;EACA;EACA;EACD,CAAC,CACoE;AACtE,OAAM,qBAAqB,gBAAgB,YAAY;;AAGzD,eAAsB,aAAa,aAAa,QAAQ,KAAK,EAAE;AAC7D,OAAM,qBACJ,KAAK,YAAY,yCAAyC,EAC1D,+BACD;AACD,OAAM,qBACJ,KAAK,YAAY,aAAa,EAC9B,mBACD;AACD,OAAM,qBACJ,KAAK,YAAY,oBAAoB,EACrC,kBACD;AACD,OAAM,qBACJ,KAAK,YAAY,+BAA+B,EAChD,0BACD;AACD,OAAM,qBACJ,KAAK,YAAY,mCAAmC,EACpD,8BACD;;AAGH,eAAsB,8BACpB,aAAa,QAAQ,KAAK,EAC1B;AACA,OAAM,+BAA+B,WAAW;AAChD,OAAM,iBAAiB,WAAW;AAClC,OAAM,mBAAmB,WAAW;AACpC,OAAM,aAAa,WAAW;;;;AC3MhC,eAAsB,4BAA4B,MAK/C;CACD,MAAM,EAAE,MAAM,KAAK,SAAS,cAAc;AAkB1C,QANiB,oBACf,MAZ4B,MAAM,0BAA0B;EAC5D,OAAO;EACP;EACA;EACD,CAAC,EAC+B,MAAM,0BAA0B;EAC/D,OAAO;EACP;EACA;EACD,CAAC,CAMD;;;;ACzBH,SAAgB,mBACd,OACiB;AAIjB,QAAO;EACL,MAJW,MAAM;EAKjB,aAJkB,MAAM,QAAQ,eAAe;EAK/C,YAJiB,MAAM,QAAQ,cAAc,IAAI,EAAE;EAKpD;;AAGH,SAAgB,oBACd,SACA;AACA,QAAO,QAAQ,IAAI,mBAAmB;;AAGxC,SAAgB,oBAAiD,MAK9D;CACD,MAAM,EAAE,gBAAgB,kBAAkB,WAAW,YAAY;AAYjE,QAFuC,yBAPtB,wBAAwB;EACvC;EACA;EACA;EACA,kBANuB,oBAAoB,QAAQ;EAOpD,CAAC,CAEuE;;AAK3E,eAAsB,yBAEpB,MAMC;CACD,MAAM,EAAE,UAAU,GAAG,aAAa;AAGlC,OAAM,UAAU,UAFY,oBAAoB,SAAS,EAEV,EAC7C,UAAU,SACX,CAAC;;;;;AChCJ,eAAsB,8BAA8B,EAClD,SACA,WACA,YACA,UACA,mBACqB;CACrB,MAAM,EAAE,WAAW,sBAAsB,qBACvC,SACA,kBACD;CACD,MAAM,wBAAwB,kBAAkB,SAAS;CACzD,MAAM,EAAE,WAAW,eAAe,qBAAqB,SAAS,UAAU;CAC1E,MA
AM,iBAAiB,WAAW,SAAS;CAC3C,MAAM,aAAa,WAAW,kBAAkB,CAAC,SAAS;CAC1D,MAAM,gBAAgB,KAAK,KAAK,gBAAgB,UAAU;AAG1D,OAAM,uBACJ,SACA,uBACA,gBACA,eANwB,KAAK,KAAK,eAAe,aAAa,CAQ/D;CACD,MAAM,uBAAuB,wBAAwB;EACnD;EACA;EACD,CAAC;CAEF,MAAM,sBAAsB,uBAAuB,qBAAqB;AAExE,OAAM,oBAAoB;EACxB;EACA;EACA,GAAG;EACH,GAAG;EACJ,CAAC;AAEF,sBAAqB;EACnB;EACA;EACA;EACA;EACA;EACD,CAAC;AAEF,OAAM,gBAAgB;EAAE;EAAS;EAAgB,CAAC;AAClD,OAAM,qBAAqB;EAAE;EAAS;EAAgB,CAAC;AACvD,OAAM,uBACJ,EACE,SAAS,CACP;EACE,MAAM;EACN,IAAI;EACJ,eAAe,CAAC,qBAAqB,gBAAgB;EACtD,CACF,EACF,EACD,WACD;;AASH,eAAe,oBAAoB,MAA+B;CAChE,MAAM,EAAE,SAAS,kBAAkB;CAEnC,MAAM,EAAE,eAAe,eAAe,sBACpC,SAFe,KAAK,KAAK,eAAe,aAAa,CAItD;AAED,KAAI,eAAe;EACjB,MAAM,sBAAsB,WAAW,YAAY,SAAS;AAC5D,MAAI,qBAAqB;AACvB,OAAI,CAAC,oBAAoB,iBAAiB,CACxC,qBAAoB,mBAAmB,KAAK;AAE9C;;;CAIJ,MAAM,WAAW,iCAAiC,KAAK;AACvD,YAAW,gBAAgB,SAAS;AACpC,OAAM,6BAA6B,WAAW;;;;AClGhD,MAAa,UAAU;CACrB,SAAS;CACT,UAAU;CACV,YAAY;CACZ,SAAS;CACT,GAAI;CACL;AAED,MAAa,oBAAoB;CAC/B,SAAS;CACT,UAAU;CACV,YAAY;CACZ,SACE;CACF,GAAI;CACL;AAED,MAAM,iBAA2D;CAC/D,OAAO;CACP,YAAY;CACb;AACD,MAAM,aAAa;AACnB,MAAM,mBAA2C;CAC/C;CACA;CACA,eAAe;CACf,cAAc;CACd,cAAc;CACd;CACD;AAED,MAAM,yBAAuD;CAC3D;CACA;CACA,eAAe;CACf,cAAc;CACd,cAAc;CACd,YAAY;CACZ,QAAQ;CACR,gBAAgB;CAChB,eAAe;CACf,YAAY,EACV,QAAQ,EACN,OAAO,CAAC,SAAS,SAAS,EAC3B,EACF;CACD,gBAAgB;CAChB;CACD;AAED,SAAS,uBAAuB,SAAgC;CAC9D,MAAM,gBAA0B,EAAE;AAClC,MAAK,MAAM,UAAU,SAAS;AAC5B,gBAAc,KAAK,KAAK,OAAO,OAAO;EACtC,MAAM,oBAAoB,OAAO,WAC9B,KAAK,cAAc,UAAU,OAAO,CACpC,QAAQ,WAAW,WAAW,KAAK;AACtC,gBAAc,KAAK,GAAG,kBAAkB;;AAE1C,QAAO;;AAGT,SAAS,2CACP,eACA;CACA,MAAM,sBAAsB,OAAO,KAAK,QAAQ,CAC7C,KAAK,MAAM,UAAU,IAAI,CACzB,KAAK,KAAK;CACb,MAAM,eAAe,OAAO,OAAO,cAAc,MAAM,CAAC,KACrD,UAAU,MAAM,OAClB;CACD,MAAM,gBAAgB,uBAAuB,cAAc,QAAQ;AAEnE,QAAO;EAAC;EAAqB,GAAG;EAAc,GAAG;EAAc;;AAGjE,eAAe,0BAA0B,MAAc,SAAiB;AAItE,QAHyB,MAAM,OAAO,SAAS,EAC7C,QAAQ,cACT,CAAC;;AAQJ,eAAsB,sCACpB,MACA;CACA,MAAM,EAAE,eAAe,WAAW;AA0ClC,OAAM,SAxCwB;EAC5B,WAAW;EACX,OAAO;EACP,OAAO,EACL,oBAAoB,2BACrB;EACD,WAAW;IACR,GAAG,cAAc,aAAa;IAC7B;IACA,QAAQ
;IACR,SAAS,CACP,EACE,YAAY,kBACb,CACF;IACF;IACA,GAAG,cAAc,WAAW;IAC3B;IACA,QAAQ;IACR,SAAS;KACP,EACE,KAAK,EACH,SACE,gEACH,EACF;KACD,EACE,KAAK,EACH,SAAS,0DACV,EACF;KACD,EACE,gDACE,wBACH;KACF;IACF;GACF;EACF,EAEsB,KAAK;;AAG9B,eAAsB,gCACpB,MACA;CACA,MAAM,EAAE,eAAe,eAAe,mBAAmB;CAEzD,MAAM,SAAS,2CAA2C,cAAc,CACrE,OAAO,QAAQ,CACf,KAAK,OAAO;AAEf,OAAM,sCAAsC;EAC1C;EACA;EACD,CAAC;AAEF,OAAM,GAAG,UAAUC,OAAK,KAAK,gBAAgB,iBAAiB,EAAE,OAAO;;;;ACxIzE,eAAsB,6CACpB,eACA;AACA,MAAK,MAAM,UAAU,cAAc,cAAc,QAC/C,OAAM,+BAA+B;EACnC,GAAG;EACH;EACD,CAAC;;AAIN,eAAsB,+BACpB,eACA;AACA,OAAM,kCAAkC,cAAc;AACtD,OAAM,mCAAmC,cAAc;AACvD,OAAM,qCAAqC,cAAc;AACzD,OAAM,gCAAgC,cAAc;;AAGtD,eAAsB,8BACpB,MACA;CACA,MAAM,WAAW,8BAA8B,KAAK;CACpD,MAAM,EAAE,SAAS,eAAe;CAEhC,MAAM,EAAE,eAAe,sBAAsB,SADvB,KAAK,KAAK,YAAY,WAAW,CACa;AACpE,YAAW,gBAAgB,SAAS;AACpC,OAAM,6BAA6B,WAAW;;AAGhD,eAAsB,kCACpB,MACA;CACA,MAAM,WAAW,kCAAkC,KAAK;CACxD,MAAM,EAAE,SAAS,eAAe;CAIhC,MAAM,EAAE,eAAe,sBAAsB,SAF5B,KAAK,KAAK,YAAY,mBAAmB,CAEK;AAE/D,YAAW,gBAAgB,SAAS;AACpC,OAAM,6BAA6B,WAAW;;AAGhD,eAAsB,iCACpB,MACA;CACA,MAAM,WAAW;CACjB,MAAM,EAAE,SAAS,kBAAkB;CAEnC,MAAM,EAAE,eAAe,sBAAsB,SAD5B,KAAK,KAAK,eAAe,WAAW,CACU;AAE/D,YAAW,gBAAgB,SAAS;AACpC,OAAM,6BAA6B,WAAW;;AAGhD,eAAsB,8BACpB,MACA;CACA,MAAM,WAAW,8BAA8B,KAAK;CACpD,MAAM,EAAE,SAAS,eAAe;CAIhC,MAAM,EAAE,eAAe,sBAAsB,SAF5B,KAAK,KAAK,YAAY,WAAW,CAEa;AAE/D,YAAW,gBAAgB,SAAS;AACpC,OAAM,6BAA6B,WAAW;;AAGhD,eAAsB,sCACpB,MACA;CACA,MAAM,EAAE,SAAS,YAAY,uBAAuB;CAGpD,MAAM,EAAE,eAAe,sBAAsB,SAF5B,KAAK,KAAK,YAAY,oBAAoB,CAEI;AAE/D,YAAW,gBAAgB,GAAG;AAE9B,YAAW,qBAAqB;EAC9B,cAAc,CAAC,2BAA2B;EAC1C,iBAAiB;EACjB,YAAY;EACb,CAAC;CAEF,MAAM,2BAA2B,mBAC/B,oBACA,WACD;AAED,YAAW,qBAAqB;EAC9B,iBAAiB,wBAAwB;EACzC,YAAY;EACZ,cAAc,CACZ;GACE,MAAM;GACN,MAAM;GACN,aAAa;GACd,CACF;EACF,CAAC;AAEF,OAAM,6BAA6B,WAAW;;AAGhD,eAAsB,uCACpB,MACA;CACA,MAAM,WAAW,wCAAwC,KAAK;CAC9D,MAAM,EAAE,SAAS,eAAe;CAIhC,MAAM,EAAE,eAAe,sBAAsB,SAF5B,KAAK,KAAK,YAAY,qBAAqB,CAEG;AAE/D,YAAW,gBAAgB,SAAS;AACpC,OAAM,6BAA6B,WAAW;;AAGhD,eAAsB,iCACpB,MACA;CACA,MAAM,WAAW,qCAAqC,KAAK;CAC3D,MAAM,EAA
E,SAAS,eAAe;CAIhC,MAAM,EAAE,eAAe,sBAAsB,SAF5B,KAAK,KAAK,YAAY,cAAc,CAEU;AAE/D,YAAW,gBAAgB,SAAS;AACpC,OAAM,6BAA6B,WAAW;;AAGhD,eAAsB,oCACpB,MACA;CACA,MAAM,WAAW,qCAAqC,KAAK;CAC3D,MAAM,EAAE,SAAS,eAAe;CAIhC,MAAM,EAAE,eAAe,sBAAsB,SAF5B,KAAK,KAAK,YAAY,kBAAkB,CAEM;AAE/D,YAAW,gBAAgB,SAAS;AACpC,OAAM,6BAA6B,WAAW;;AAGhD,eAAsB,mCACpB,MACA;CACA,MAAM,WAAW,uCAAuC,KAAK;CAC7D,MAAM,EAAE,SAAS,eAAe;CAIhC,MAAM,EAAE,eAAe,sBAAsB,SAF5B,KAAK,KAAK,YAAY,gBAAgB,CAEQ;AAE/D,YAAW,gBAAgB,SAAS;AACpC,OAAM,6BAA6B,WAAW;;AAGhD,eAAsB,8BACpB,MACA;CACA,MAAM,WAAW,kCAAkC,KAAK;CACxD,MAAM,EAAE,SAAS,eAAe;CAIhC,MAAM,EAAE,eAAe,sBAAsB,SAF5B,KAAK,KAAK,YAAY,WAAW,CAEa;AAE/D,YAAW,gBAAgB,SAAS;AACpC,OAAM,6BAA6B,WAAW;;AAGhD,eAAsB,gCACpB,MACA;CACA,MAAM,WAAW,oCAAoC,KAAK;CAC1D,MAAM,EAAE,SAAS,eAAe;CAIhC,MAAM,EAAE,eAAe,sBAAsB,SAF5B,KAAK,KAAK,YAAY,aAAa,CAEW;AAE/D,YAAW,gBAAgB,SAAS;AACpC,OAAM,6BAA6B,WAAW;;AAGhD,eAAsB,gCACpB,MACA;CACA,MAAM,WAAW,oCAAoC,KAAK;CAC1D,MAAM,EAAE,SAAS,eAAe;CAIhC,MAAM,EAAE,eAAe,sBAAsB,SAF5B,KAAK,KAAK,YAAY,aAAa,CAEW;AAE/D,YAAW,gBAAgB,SAAS;AACpC,OAAM,6BAA6B,WAAW;;AAGhD,eAAsB,kCACpB,MACA;CACA,MAAM,EAAE,WAAW;CACnB,MAAM,sBAAsB,UAAU,OAAO,KAAK;CAClD,MAAM,WAAW,gDAAgD,KAAK;CACtE,MAAM,EAAE,SAAS,eAAe;CAEhC,MAAM,UAAU,KAAK,KAAK,YAAY,oBAAoB;CAG1D,MAAM,EAAE,eAAe,sBAAsB,SAF5B,KAAK,KAAK,SAAS,aAAa,CAEc;AAE/D,YAAW,gBAAgB,SAAS;AACpC,OAAM,6BAA6B,WAAW;;AAGhD,eAAsB,mCACpB,MACA;CACA,MAAM,EAAE,WAAW;CACnB,MAAM,sBAAsB,UAAU,OAAO,KAAK;CAClD,MAAM,WAAW,kDAAkD,KAAK;CACxE,MAAM,EAAE,SAAS,eAAe;CAEhC,MAAM,UAAU,KAAK,KAAK,YAAY,oBAAoB;CAG1D,MAAM,EAAE,eAAe,sBAAsB,SAF5B,KAAK,KAAK,SAAS,cAAc,CAEa;AAE/D,YAAW,gBAAgB,SAAS;AACpC,OAAM,6BAA6B,WAAW;;AAGhD,eAAsB,qCACpB,MACA;CACA,MAAM,EAAE,WAAW;CACnB,MAAM,sBAAsB,UAAU,OAAO,KAAK;CAClD,MAAM,WAAW,oDAAoD,KAAK;CAC1E,MAAM,EAAE,SAAS,eAAe;CAEhC,MAAM,UAAU,KAAK,KAAK,YAAY,oBAAoB;CAG1D,MAAM,EAAE,eAAe,sBAAsB,SAF5B,KAAK,KAAK,SAAS,gBAAgB,CAEW;AAE/D,YAAW,gBAAgB,SAAS;AACpC,OAAM,6BAA6B,WAAW;;AAGhD,eAAsB,gCACpB,MACA;CACA,MAAM,EAAE,WAAW;CACnB,MAAM,sBAAsB,UAAU,OAAO,KAAK;CAClD,MAAM,WAAW,+CAA+C,KAAK;CACrE,MAAM,EAAE,SAAS,eAAe;CAEhC
,MAAM,UAAU,KAAK,KAAK,YAAY,oBAAoB;CAI1D,MAAM,EAAE,eAAe,sBAAsB,SAF5B,KAAK,KAAK,SAAS,WAAW,CAEgB;AAE/D,YAAW,gBAAgB,SAAS;AACpC,OAAM,6BAA6B,WAAW;;;;AClShD,eAAsB,kCACpB,MACA;CACA,MAAM,WAAW;CACjB,MAAM,EAAE,SAAS,mBAAmB;CAIpC,MAAM,EAAE,eAAe,sBAAsB,SAF5B,KAAK,KAAK,gBAAgB,WAAW,CAES;AAE/D,YAAW,gBAAgB,SAAS;AACpC,OAAM,6BAA6B,WAAW;;AAGhD,eAAsB,2BACpB,MACA;CACA,MAAM,WAAW,2BAA2B,KAAK;CACjD,MAAM,EAAE,SAAS,mBAAmB;CAIpC,MAAM,EAAE,eAAe,sBAAsB,SAF5B,KAAK,KAAK,gBAAgB,WAAW,CAES;AAC/D,YAAW,gBAAgB,SAAS;AACpC,OAAM,6BAA6B,WAAW;;AAGhD,eAAsB,iCACpB,MACA;CACA,MAAM,WAAW,qCAAqC,KAAK;CAC3D,MAAM,EAAE,SAAS,mBAAmB;CAIpC,MAAM,EAAE,eAAe,sBAAsB,SAF5B,KAAK,KAAK,gBAAgB,aAAa,CAEO;AAE/D,YAAW,gBAAgB,SAAS;AACpC,OAAM,6BAA6B,WAAW;;AAGhD,eAAsB,2BACpB,MACA;CACA,MAAM,WAAW,+BAA+B,KAAK;CACrD,MAAM,EAAE,SAAS,mBAAmB;CAIpC,MAAM,EAAE,eAAe,sBAAsB,SAF5B,KAAK,KAAK,gBAAgB,WAAW,CAES;AAE/D,YAAW,gBAAgB,SAAS;AACpC,OAAM,6BAA6B,WAAW;;AAGhD,eAAsB,4BACpB,MACA;CACA,MAAM,EAAE,SAAS,mBAAmB;CACpC,MAAM,WAAW,gCAAgC,KAAK;CAItD,MAAM,EAAE,eAAe,sBAAsB,SAFtB,KAAK,KAAK,gBAAgB,YAAY,CAEQ;AAErE,YAAW,gBAAgB,SAAS;AAEpC,OAAM,6BAA6B,WAAW;;;;ACzDhD,eAAsB,uCACpB,eACA;CACA,MAAM,EAAE,kBAAkB;AAC1B,MAAK,MAAM,UAAU,cAAc,QACjC,OAAM,qCAAqC;EACzC,GAAG;EACH;EACD,CAAC;;AAIN,eAAsB,qCAAqC,EACzD,SACA,QACA,SACA,YACA,mBACA,wBACA,yBACmC;CACnC,MAAM,sBAAsB,UAAU,OAAO,KAAK;CAClD,MAAM,uBAAuB,WAAW,OAAO,KAAK;CACpD,MAAM,WAAW,KAAK,KACpB,YACA,YACA,GAAG,oBAAoB,KACxB;CACD,MAAM,EAAE,eAAe,eAAe,sBACpC,SACA,SACD;AACD,KAAI,CAAC,eAAe;EAClB,MAAM,sBAAsB,6BAA6B;GACvD;GACA;GACA;GACD,CAAC;AACF,MAAI,oBACF,YAAW,gBAAgB,oBAAoB,SAAS,CAAC;;CAG7D,MAAM,8BAA8B,GAAG,yBAAyB,qBAAqB;CACrF,MAAM,kCAAkC,GAAG,wBAAwB,qBAAqB;CAExF,MAAM,wCAAwC,WAAW,sBACtD,sBACC,CAAC,CAAC,kBACC,iBAAiB,CACjB,MACE,oBACC,gBAAgB,SAAS,KAAK,4BACjC,CACN;AACD,KAAI,sCACF,uCAAsC,QAAQ;CAShD,MAAM,oCANgC,WAAW,qBAAqB;EACpE,cAAc,CAAC,4BAA4B;EAC3C,iBAAiB;EACjB,YAAY;EACb,CAAC,CAGC,iBAAiB,CACjB,MAAM,UAAU,MAAM,SAAS,KAAK,4BAA4B,EAC/D,aAAa,CACd,SAAS,CACT,eAAe,CACf,KAAK,WAAW,OAAO,SAAS,CAAC;AAEpC,KAAI,CAAC,kCACH,OAAM,IAAI,MAAM,4CAA4C;CAG9D,IAAI,uCAAuC,WAAW
,qBACpD,gCACD;AAED,KAAI,CAAC,qCACH,wCAAuC,WAAW,qBAAqB;EACrE,iBAAiB,wBAAwB;EACzC,YAAY;EACZ,cAAc,CACZ;GACE,MAAM;GACN,MAAM;GACN,aAAa;GACd,CACF;EACF,CAAC;CAGJ,MAAM,4BAA4B,iBAChC,qCACD;AAED,KAAI,CAAC,0BACH,OAAM,IAAI,MAAM,iCAAiC;CAInD,MAAM,yCAAyB,IAAI,KAGhC;AACH,MAAK,MAAM,aAAa,OAAO,WAC7B,KAAI,UAAU,MAAM;EAClB,MAAM,aAAa,GAAG,UAAU,UAAU,KAAK,CAAC;AAChD,yBAAuB,IAAI,YAAY,UAAU;;AAIrD,MAAK,MAAM,QAAQ,mCAAmC;AACpD,MAAI,0BAA0B,YAAY,KAAK,CAAE;EAGjD,MAAM,cADgB,uBAAuB,IAAI,KAAK,EACnB,SAAS,MAAM;AAElD,4BAA0B,UAAU;GAClC;GACA,YAAY,CAAC,EAAE,MAAM,SAAS,EAAE,EAAE,MAAM,UAAU,CAAC;GACnD,YAAY,cACR,CAAC,YAAY,GACb,CACE,sBAAsB,KAAK,WAC3B,IAAE,iCAAiC,KAAK,sBAAsB,IAC/D;GACN,CAAC;;AAIJ,0BAAyB,YAAY,OAAO;AAE5C,OAAM,6BAA6B,WAAW;;AAGhD,eAAsB,8BAA8B,EAClD,SACA,GAAG,iBAC0B;CAC7B,MAAM,WAAW;CACjB,MAAM,EAAE,eAAe;CAIvB,MAAM,EAAE,eAAe,sBAAsB,SAF5B,KAAK,KAAK,YAAY,WAAW,CAEa;AAE/D,YAAW,gBAAgB,SAAS;AACpC,OAAM,6BAA6B,WAAW;;AAGhD,eAAsB,8BAA8B,EAClD,SACA,YACA,WAC6B;CAC7B,MAAM,WAAW;CAEjB,MAAM,WAAW,KAAK,KAAK,YAAY,WAAW;CAElD,MAAM,EAAE,eAAe,eAAe,sBACpC,SACA,SACD;AAED,KAAI,CAAC,eAAe;EAClB,MAAM,4BAA4B,6BAA6B;GAC7D;GACA;GACA;GACD,CAAC;AAEF,MAAI,0BACF,YAAW,gBAAgB,0BAA0B,SAAS,CAAC;MAE/D,YAAW,gBAAgB,SAAS;;AAIxC,OAAM,6BAA6B,WAAW;;AAGhD,SAAS,yBACP,YACA,QACM;CAEN,MAAM,YAAgC,EAAE;AACxC,MAAK,MAAM,aAAa,OAAO,WAC7B,KAAI,MAAM,QAAQ,UAAU,OAAO;OAC5B,MAAM,SAAS,UAAU,OAC5B,KAAI,MAAM,QAAQ,CAAC,UAAU,MAAM,MAAM,EAAE,SAAS,MAAM,KAAK,CAC7D,WAAU,KAAK,EAAE,MAAM,MAAM,MAAM,CAAC;;AAM5C,KAAI,UAAU,WAAW,EAAG;CAG5B,MAAM,oBAAoB,WAAW,aAAa;CAClD,MAAM,aAAuB,EAAE;AAE/B,MAAK,MAAM,SAAS,UAElB,KADqB,IAAI,OAAO,MAAM,MAAM,KAAK,MAAM,IAAI,CAC1C,KAAK,kBAAkB,CACtC,YAAW,KAAK,MAAM,KAAK;AAI/B,KAAI,WAAW,WAAW,EAAG;CAE7B,MAAM,kBAAkB,aAAa,UAAU,OAAO,KAAK,CAAC;CAE5D,MAAM,sBAAsB,WACzB,uBAAuB,CACvB,MACE,eAAe,WAAW,yBAAyB,KAAK,gBAC1D;AAEH,KAAI,qBAAqB;EACvB,MAAM,uBAAuB,oBAC1B,iBAAiB,CACjB,KAAK,gBAAgB,YAAY,SAAS,CAAC;EAE9C,MAAM,oBAAoB,WAAW,QAClC,cAAc,CAAC,qBAAqB,SAAS,UAAU,CACzD;AAED,MAAI,kBAAkB,SAAS,EAC7B,qBAAoB,gBAAgB,kBAAkB;OAGxD,YAAW,qBAAqB;EAC9B,cAAc;EACd,iBAAiB;EAClB,CAAC;;;;AChPN,eAAsB
,2CACpB,eACA;AACA,MAAK,MAAM,UAAU,cAAc,cAAc,QAC/C,OAAM,4BAA4B;EAAE,GAAG;EAAe;EAAQ,CAAC;;AAInE,eAAsB,4BACpB,MACA;CACA,MAAM,EACJ,SACA,QACA,SACA,mBACA,cACA,mCACE;CACJ,MAAM,sBAAsB,UAAU,OAAO,KAAK;CAElD,MAAM,2BAA2B,GADJ,WAAW,OAAO,KAAK,CACK;CACzD,MAAM,WAAW,KAAK,KAAK,cAAc,GAAG,oBAAoB,UAAU;CAE1E,MAAM,EAAE,eAAe,eAAe,sBACpC,SACA,SACD;AAED,KAAI,CAAC,eAAe;EAClB,MAAM,4BAA4B,6BAA6B;GAC7D;GACA;GACA;GACD,CAAC;AAEF,MAAI,0BACF,YAAW,gBAAgB,0BAA0B,SAAS,CAAC;MAE/D,YAAW,gBACT,IAAE;;;;oBAIU,yBAAyB;;;UAGnC,IACH;;CAIL,MAAM,cAAc,yBAAyB,KAAK;CAClD,MAAM,eAAe,YAAY,KAAK,UAAU,EAAE,MAAM,EAAE;CAE1D,IAAI,2BAA2B,WAC5B,uBAAuB,CACvB,QAAQ,MAAM,CAAC,EAAE,YAAY,CAAC,CAC9B,MAAM,sBACL,kBACG,oBAAoB,CACpB,SAAS,CACT,SAAS,kBAAkB,CAC/B;AAEH,KAAI,CAAC,yBACH,4BAA2B,WAAW,qBAAqB;EACzD;EACA,iBAAiB;EAClB,CAAC;MACG;AACL,2BAAyB,mBAAmB,kBAAkB;EAC9D,MAAM,uBAAuB,yBAC1B,iBAAiB,CACjB,KAAK,UAAU,MAAM,SAAS,CAAC;AAElC,OAAK,MAAM,QAAQ,YACjB,KAAI,CAAC,qBAAqB,SAAS,KAAK,CACtC,0BAAyB,eAAe,KAAK;;CAKnD,MAAM,eAAe,WAClB,qBAAqB,WAAW,eAAe,CAC/C,MAAM,SAAS;EACd,MAAM,iBAAiB,KAAK,eAAe,CAAC,SAAS;EAErD,MAAM,WADO,KAAK,cAAc,CACV;AACtB,SACE,mBAAmB,cACnB,WAAW,SAAS,SAAS,CAAC,CAAC,SAAS,yBAAyB;GAEnE;AAEJ,KAAI,CAAC,cAAc;AACjB,UAAQ,MACN,qBAAqB,SAAS,6BAA6B,2BAC5D;AACD;;CAGF,MAAM,mBAAmB,aACtB,cAAc,CAAC,GACf,cAAc,WAAW,cAAc;CAE1C,MAAM,gBAAgB,aACnB,qBAAqB,WAAW,eAAe,CAC/C,QAAQ,SAAS;EAChB,MAAM,iBAAiB,KAAK,eAAe,CAAC,SAAS;AACrD,SAAO,mBAAmB,QAAQ,mBAAmB;GACrD,CACD,KAAK,MAAM,EAAE,cAAc,CAAC,GAAG,SAAS,CAAC;CAE5C,MAAM,iBAAiB,KACrB,OAAO,YACP,QAAQ,MAAM,CAAC,aAAa,UAAU,EAAE,QAAQ,GAAG,EAAE,cAAc,CAAC,EACpE,KAAK,MAAM,yBAAyB,GAAG,+BAA+B,CAAC,CACxE;AAED,kBAAiB,cAAc,eAAe;CAE9C,MAAM,qBAAqB;CAC3B,MAAM,iCAAiC;CAEvC,MAAM,qBAAqB,WAAW,sBAAsB,MAC1D,EAAE,iBAAiB,CAAC,MAAM,MAAM,EAAE,SAAS,CAAC,SAAS,mBAAmB,CAAC,CAC1E;AAMD,KAJoC,WACjC,SAAS,CACT,SAAS,mBAAmB,IAEI,CAAC,mBAClC,YAAW,qBAAqB;EAC9B,cAAc,CAAC,mBAAmB;EAClC,iBAAiB;EAClB,CAAC;AAGJ,YAAW,sBAAsB;AACjC,OAAM,6BAA6B,WAAW;;AAGhD,eAAsB,0BACpB,MACA;CACA,MAAM,EAAE,SAAS,iBAAiB;CAClC,MAAM,WAAW,8BAA8B,KAAK;CAIpD,MAAM,EAAE,eAAe,eAAe,sBACpC,SAHe,KAAK,KAAK
,cAAc,yBAAyB,CAKjE;AAED,KAAI,cAAe;AAEnB,YAAW,gBAAgB,SAAS;AACpC,OAAM,6BAA6B,WAAW;;;;ACzKhD,eAAsB,gBAAgB,MAAkC;CACtE,MAAM,EAAE,SAAS,SAAS,oBAAoB;AAC9C,KAAI,UAAU,EAAG;CAGjB,MAAM,EAAE,eAAe,eAAe,sBACpC,SAFe,KAAK,KAAK,iBAAiB,IAAI,QAAQ,KAAK,CAI5D;AAED,KAAI,cAAe;CAEnB,MAAM,WAAW,0BAA0B,KAAK;AAEhD,YAAW,gBAAgB,SAAS;AACpC,OAAM,6BAA6B,WAAW;;AAGhD,eAAsB,kCACpB,MACA;CACA,MAAM,EAAE,SAAS,UAAU,oBAAoB;CAG/C,MAAM,EAAE,eAAe,sBAAsB,SAF5B,KAAK,KAAK,iBAAiB,sBAAsB,CAEH;CAE/D,MAAM,WAAW,wBAAwB,KAAK;AAE9C,YAAW,gBAAgB,SAAS;CAEpC,MAAM,2BAA2B,8BAA8B,SAAS;AAExE,YAAW,sBAAsB,yBAAyB;CAE1D,MAAM,2BAA2B,iCAC/B,YACA,kBACD,EAAE,6BAA6B;CAEhC,MAAM,mBADgB,iBAAiB,yBAAyB,EACxB,YAAY,WAAW;CAC/D,MAAM,WAAW,cAAc,SAAS;AACxC,mBAAkB,gBAAgB,SAAS;AAC3C,OAAM,6BAA6B,WAAW;;AAGhD,SAAS,cAAc,cAAwB;CAC7C,MAAM,iBAA2B,EAAE;AAEnC,MAAK,MAAM,WAAW,cAAc;AAClC,MAAI,UAAU,EAAG;AACjB,iBAAe,KAAK,IAAI,UAAU;;AAGpC,QAAO,eAAe,eAAe,KAAK,MAAM,CAAC;;AAGnD,SAAS,8BAA8B,cAAwB;CAC7D,MAAM,UAGA,EAAE;AAER,MAAK,MAAM,WAAW,cAAc;AAClC,MAAI,UAAU,EAAG;EACjB,MAAM,eAAe,CAAC,IAAI,UAAU;EACpC,MAAM,kBAAkB,MAAM,QAAQ;AACtC,UAAQ,KAAK;GACX;GACA;GACD,CAAC;;AAGJ,QAAO;;AAST,eAAsB,mCAAmC,EACvD,UACA,eACA,SACA,mBAC+B;CAC/B,MAAM,qBAAqB;CAC3B,MAAM,iBAAiB;CAEvB,MAAM,EAAE,eAAe,sBAAsB,SAD5B,KAAK,KAAK,iBAAiB,cAAc,CACK;AAC/D,YAAW,gBAAgB,GAAG;CAE9B,MAAM,qBAAqB,SAAS,QAAQ,cAAc;CAC1D,MAAM,qBAAqB,IAAI,SAAS,KAAK,KAAK,CAAC;CACnD,MAAM,oBAAoB,GAAG,mBAAmB,GAAG,mBAAmB;AAEtE,YAAW,qBAAqB;EAC9B,iBAAiB,wBAAwB;EACzC,YAAY;EACZ,cAAc,CACZ;GACE,MAAM;GACN,aAAa;GACd,CACF;EACF,CAAC;AAEF,YAAW,qBAAqB;EAC9B,iBAAiB,wBAAwB;EACzC,YAAY;EACZ,cAAc,CACZ;GACE,MAAM;GACN,aAAa;GACd,CACF;EACF,CAAC;AAEF,OAAM,6BAA6B,WAAW;;AAShD,eAAsB,sBAAsB,EAC1C,SACA,iBACA,UACA,uBAC4B;CAE5B,MAAM,EAAE,eAAe,sBAAsB,SAD5B,KAAK,KAAK,iBAAiB,WAAW,CACQ;AAC/D,YAAW,gBAAgB,GAAG;CAE9B,MAAM,wBAAwB,0BAA0B,SAAS;AAEjE,YAAW,sBAAsB;EAC/B;GACE,cAAc,CAAC,oBAAoB;GACnC,iBAAiB;GAClB;EACD;GACE,cAAc,CAAC,qBAAqB,gBAAgB;GACpD,iBAAiB;GAClB;EACD,GAAG;EACJ,CAAC;AACF,OAAM,6BAA6B,WAAW;;AAGhD,SAAS,0BAA0B,cAAwB;CACzD,MAAM,UAGA,EAAE;AAER,MAAK,MAAM,WAAW,cAAc;AAClC,MAAI,UA
AU,EAAG;EACjB,MAAM,eAAe,CAAC,IAAI,UAAU;EACpC,MAAM,kBAAkB,MAAM,QAAQ;AACtC,UAAQ,KAAK;GACX;GACA;GACD,CAAC;;AAGJ,QAAO;;;;;;;;AC1GT,eAAsB,6BACpB,oBACA,SACA;CACA,MAAM,EAAE,MAAM,IAAI,mBAAmB;CACrC,MAAM,EAAE,WAAW,sBAAsB,qBACvC,SACA,kBACD;CACD,MAAM,wBAAwB,kBAAkB,SAAS;CACzD,MAAM,aAAa,kBAAkB,kBAAkB,CAAC,SAAS;CACjE,MAAM,uBAAuB,UAAU,KAAK;CAC5C,MAAM,uBAAuBC,OAC3B,uBACA,qBACD;CACD,MAAM,0BAA0BA,OAAK,mBAAmB,qBAAqB;CAC7E,MAAM,kBAAkBA,OAAK,sBAAsB,WAAW;CAC9D,MAAM,6BAA6B,8BAA8B,KAAK;AACtE,OAAM,uBACJ,SACA,uBACA,sBACA,gBACD;CAED,MAAM,WAAW,KACf,gBACA,IAAI,KAAK,UAAU,CAAC,EACpB,QAAQ,EACR,KAAK,SAAS,CACf;AAED,KAAI,SAAS,WAAW,eAAe,OACrC,OAAM,IAAI,MACR,kGACD;CAGH,MAAM,gBAAgB,KAAK,IAAI,GAAG,SAAS;AAC3C,KAAI,KAAK,KAAK,eAAe,EAAE,UAAU,KAAK,cAC5C,OAAM,IAAI,MACR,mEACD;AAGH,OAAM,gCAAgC;EACpC;EACA;EACA;EACD,CAAC;AAEF,MAAK,MAAM,iBAAiB,gBAAgB;EAC1C,MAAM,EAAE,YAAY;EACpB,MAAM,iBAAiB,IAAI;EAC3B,MAAM,iBAAiBA,OAAK,sBAAsB,eAAe;EACjE,MAAM,oBAAoBA,OAAK,yBAAyB,eAAe;EACvE,MAAM,aAAaA,OAAK,gBAAgB,MAAM;EAC9C,MAAM,eAAeA,OAAK,gBAAgB,QAAQ;EAClD,MAAM,aAAaA,OAAK,gBAAgB,MAAM;EAC9C,MAAM,gBAAgBA,OAAK,YAAY,SAAS;EAChD,MAAM,EAAE,oBAAoB,sBAAsB,iBAChD,cAAc,MACf;EACD,MAAM,iBAAiB,cAAc,MAAM,MAAM,WAAW;EAE5D,MAAM,gBAA4C;GAChD,GAAG;GACH;GACA;GACA;GACA;GACA;GACA;GACA;GACA;GACA;GACA;GACA;GACA;GACA;GACA;GACA;GACA;GACA;GACA;GACA;GACA;GACA;GACA;GACD;AAGD,QAAM,gCAAgC,cAAc;AACpD,QAAM,kCAAkC,cAAc;AACtD,QAAM,iCAAiC,cAAc;AACrD,QAAM,2BAA2B,cAAc;AAC/C,QAAM,2BAA2B,cAAc;AAC/C,QAAM,4BAA4B,cAAc;AAGhD,QAAM,iCAAiC,cAAc;AACrD,QAAM,8BAA8B,cAAc;AAClD,QAAM,8BAA8B,cAAc;AAClD,QAAM,iCAAiC,cAAc;AACrD,QAAM,gCAAgC,cAAc;AACpD,QAAM,uCAAuC,cAAc;AAC3D,QAAM,gCAAgC,cAAc;AACpD,QAAM,kCAAkC,cAAc;AACtD,QAAM,8BAA8B,cAAc;AAClD,QAAM,sCAAsC,cAAc;AAC1D,QAAM,oCAAoC,cAAc;AACxD,QAAM,mCAAmC,cAAc;AACvD,QAAM,6CAA6C,cAAc;AAGjE,QAAM,8BAA8B,cAAc;AAClD,QAAM,8BAA8B,cAAc;AAClD,QAAM,uCAAuC,cAAc;AAG3D,QAAM,0BAA0B,cAAc;AAC9C,QAAM,2CAA2C,cAAc;AAG/D,QAAM,sCAAsC,cAAc;AAG1D,QAAM,gBAAgB,cAAc;AAGpC,QAAM,kCAAkC,cAAc;;AAIxD,OAAM,mCAAmC;EACvC;EACA;EACA;EACA;EACD,CAAC;AAGF,OAAM,sBAAsB;EAC1B,GAAG;EACH;EACA;
EACA;EACD,CAAC;AAEF,OAAM,2BAA2B;EAC/B;EACA;EACA;EACD,CAAC;AAEF,OAAM,uBAAuB;EAAE;EAAS;EAAuB,CAAC;AAEhE,OAAM,4BAA4B;EAAE;EAAS;EAAuB,CAAC;AAErE,OAAM,yBAAyB;EAAE;EAAS;EAAuB,CAAC;AAClE,OAAM,uBACJ,EACE,gBAAgB,CACd;EACE;EACA;EACD,CACF,EACF,EACD,WACD;;AAGH,eAAe,yBAAyB,MAGrC;CACD,MAAM,EAAE,SAAS,0BAA0B;CAC3C,MAAM,aAAa,QAAQ,iBACzBA,OAAK,uBAAuB,uBAAuB,EACnD,0BACA,EAAE,WAAW,MAAM,CACpB;CAED,MAAM,wBAAwB,WAC3B,8BAA8B,mBAAmB,CACjD,gCAAgC,WAAW,uBAAuB;AAErE,MACE,QAAQ,gBAAgB,EAExB,QAAQ,eAAe,WAAW,aAAa,KAAK,sBAAsB,EAE1E,KAAK,eACH,iCAAiC,YAAY,kBAAkB,CAChE,EACD,OAAO,SAAS,EAEhB,KAAK,yBAAyB;EAC5B,MAAM,oBAAoB,SAAS;EAEnC,kBAAkB,oBACf,eAAe,CACf,cAAc,CACd,kBAAkB,CAClB,aAAa;EACjB,EAAE,EACH,SAAS,KAAK,OAAO,CAAC,EAEtB,KAAK,EAAE,MAAM,wBAAwB;EACnC;EACA,cAAc,CAAC,KAAK;EACpB,iBAAiBA,OAAK,mBAAmB,kBAAkB,WAAW;EACvE,EAAE,EAEH,SAAS,EAAE,MAAM,cAAc,sBAAsB;AACnD,aAAW,qBAAqB;GAAE;GAAc;GAAiB,CAAC;AAClE,wBAAsB,WAAW,KAAK;GACtC,CACH;AAED,OAAM,6BAA6B,WAAW;;AAGhD,eAAe,uBAAuB,MAGnC;CACD,MAAM,EAAE,SAAS,0BAA0B;CAC3C,MAAM,aAAa,QAAQ,iBACzBA,OAAK,uBAAuB,qBAAqB,EACjD,wBACA,EAAE,WAAW,MAAM,CACpB;CAED,MAAM,sBAAsB,WACzB,8BAA8B,iBAAiB,CAC/C,gCAAgC,WAAW,uBAAuB;AAErE,MACE,QACG,oBAAoB,sBAAsB,CAC1C,0BAA0B,EAC7B,QAAQ,eAAe,WAAW,aAAa,KAAK,YAAY,EAChE,UAAU,eAAe,WAAW,aAAa,CAAC,EAClD,KAAK,eACH,iCAAiC,YAAY,gBAAgB,CAC9D,EACD,OAAO,SAAS,EAChB,KAAK,yBAAyB;EAC5B,MAAM,oBAAoB,SAAS;EACnC,WAAW,oBAAoB,eAAe,CAAC,cAAc;EAC9D,EAAE,EACH,KAAK,EAAE,MAAM,iBAAiB;EAC5B;EACA,SAAS,UAAU,aAAa;EAChC,kBAAkB,UAAU,kBAAkB,CAAC,aAAa;EAC7D,EAAE,EACH,QAAQ,EAAE,cAAc,SAAS,KAAK,QAAQ,CAAC,EAC/C,KAAK,EAAE,MAAM,SAAS,wBAAwB;EAC5C,MAAM,GAAG,OAAO,WAAW,QAAQ;EAEnC,cAAc,CAAC,GAAG,KAAK,MAAM,OAAO,WAAW,QAAQ,GAAG;EAC1D,iBAAiBA,OAAK,mBAAmB,kBAAkB,QAAQ;EACpE,EAAE,EACH,SAAS,EAAE,MAAM,cAAc,sBAAsB;AACnD,aAAW,qBAAqB;GAC9B;GACA;GACD,CAAC;AACF,sBAAoB,WAAW,KAAK;GACpC,CACH;AACD,OAAM,6BAA6B,WAAW;;AAGhD,eAAe,4BAA4B,MAGxC;CACD,MAAM,EAAE,SAAS,0BAA0B;CAC3C,MAAM,aAAa,QAAQ,iBACzBA,OAAK,uBAAuB,WAAW,EACvC,IACA,EAAE,WAAW,MAAM,CACpB;AACD,MACE,QACG,oBAAoB,sBAAsB,CAC1C,0BAA0B,EAC7B,QAAQ,eAAe,WAAW,aAAa,KAAK,YAAY
,EAChE,UAAU,eAAe,WAAW,aAAa,CAAC,EAClD,KAAK,eACH,iCAAiC,YAAY,gBAAgB,CAC9D,EACD,OAAO,SAAS,EAChB,KAAK,yBAAyB;EAC5B,MAAM,oBAAoB,SAAS;EACnC,WAAW,oBAAoB,eAAe,CAAC,cAAc;EAC9D,EAAE,EACH,KAAK,EAAE,MAAM,iBAAiB;EAC5B;EACA,SAAS,UAAU,aAAa;EAChC,kBAAkB,UAAU,kBAAkB,CAAC,aAAa;EAC7D,EAAE,EACH,QAAQ,EAAE,cAAc,SAAS,KAAK,QAAQ,CAAC,EAC/C,KAAK,EAAE,MAAM,SAAS,wBAAwB;EAE5C,cAAc,CAAC,GAAG,KAAK,MAAM,OAAO,WAAW,QAAQ,GAAG;EAC1D,iBAAiB,KAAK,iBAAiB,GAAG,QAAQ;EACnD,EAAE,EACH,SAAS,EAAE,cAAc,sBAAsB;AAC7C,aAAW,qBAAqB;GAC9B;GACA;GACD,CAAC;GACF,CACH;AACD,OAAM,6BAA6B,WAAW;;;AAIhD,eAAe,gCAAgC,EAC7C,oBACA,sBACA,wBAKC;AAGD,OAAM,UAFWA,OAAK,sBAAsB,GAAG,qBAAqB,OAAO,EAC5C,KAAK,UAAU,oBAAoB,MAAM,EAAE,CACzB;;AAGnD,eAAe,2BAA2B,MAIvC;CACD,MAAM,EAAE,SAAS,sBAAsB,kBAAkB;CAIzD,MAAM,EAAE,eAAe,sBAAsB,SAF5BA,OAAK,sBAAsB,WAAW,CAEQ;AAE/D,YAAW,gBAAgB,GAAG;AAC9B,YAAW,sBAAsB,CAC/B,EAAE,iBAAiB,MAAM,cAAc,YAAY,EACnD,EAAE,iBAAiB,uBAAuB,CAC3C,CAAC;AAEF,OAAM,6BAA6B,WAAW;;AAGhD,eAAe,sCACb,MACA;CACA,MAAM,EAAE,SAAS,yBAAyB;CAC1C,MAAM,kBAAkB,UAAU;AAClC,KAAI,mBAAmB,EAAG;CAE1B,MAAM,wBAAwB,IAAI;CAClC,MAAM,yBAAyB,IAAI;AAEnC,KAAI,0BAA0B,uBAAwB;CAEtD,MAAM,yBAAyBA,OAC7B,sBACA,uBACD;CACD,MAAM,wBAAwBA,OAC5B,sBACA,sBACD;AAKD,KAAI,CAJ6B,MAAM,gBACrC,uBACD,CAE8B;CAO/B,MAAM,wBAL6B,MAAM,QAAQ,wBAAwB;EACvE,eAAe;EACf,WAAW;EACZ,CAAC,EAGC,QAAQ,WAAW,OAAO,QAAQ,CAAC,CACnC,KAAK,EAAE,MAAM,kBAAkB;EAC9B;EACA;EACA,cAAc,SAAS,wBAAwB,WAAW;EAC3D,EAAE;AAEL,MAAK,MAAM,EAAE,MAAM,kBAAkB,sBAAsB;EACzD,MAAM,8BAA8BA,OAClC,uBACA,cACA,KACD;EACD,MAAM,+BAA+BA,OACnC,wBACA,cACA,KACD;EACD,MAAM,6BAA6B,MAAM,WACvC,6BACD;EACD,MAAM,4BAA4B,MAAM,WACtC,4BACD;AACD,MAAI,8BAA8B,CAAC,2BAA2B;AAC5D,WAAQ,IACN,oBAAoBA,OAAK,cAAc,KAAK,CAAC,oCAC9C;AACD,SAAM,MAAMA,OAAK,uBAAuB,aAAa,EAAE,EACrD,WAAW,MACZ,CAAC;AACF,SAAM,SAAS,8BAA8B,4BAA4B;;;;;;AC5e/E,SAAgB,kBAAkB,WAAmC;AACnE,QAAO,aAAa,UAAU,OAAO;;AAGvC,SAAgB,uBAAuB,WAAmC;AACxE,QACE,UAAU,QAAQ,SAAS,SAAS,IACpC,CAAC,UAAU,OAAO,QAAQ,qBAAqB,GAAG,CAAC,MAAM,aAAa;;AAI1E,SAAgB,uBAAuB,WAAmC;AACxE,QAAO,UAAU,QAAQ,SAAS,eAAe;;;;;;;ACGnD,SAAgB,qBAAqB,EACnC,SACA,gBACA,WAC0B;CAC
1B,MAAM,sBAAsBC,OAAK,KAAK,gBAAgB,WAAW;CAGjE,IAAI,kBAAkB,QAAQ,cAAc,oBAAoB;AAEhE,KAAI,CAAC,gBACH,mBAAkB,QAAQ,iBAAiB,qBAAqB,GAAG;KAEnE,iBAAgB,gBAAgB,GAAG;AAGrC,iBAAgB,sBACd,QAAQ,KAAK,EAAE,eAAe,iBAAiB,uBAAuB;EACpE,cAAc,CACZ,gBACI,GAAG,gBAAgB,MAAM,kBACzB,gBACL;EACD;EACD,EAAE,CACJ;;;;ACrBH,eAAsB,wBACpB,cACmB;AAEnB,KAAI,CADoB,MAAM,WAAW,aAAa,CAEpD,OAAM,cAAc,cAAc,gBAAgB;CAEpD,MAAM,eAAe,MAAM,aAAa,aAAa;AACrD,QAAO,eAAe,MAAM,aAAa;;AAG3C,SAAS,uBACP,aAAiC,EAAE,EACnC,aAAiC,EAAE,EACf;AACpB,QAAO,KAAK,OAAO,YAAY,WAAW,EAAE,SAAS,KAAK,KAAK,CAAC,CAAC;;AAGnE,SAAS,kBACP,YAA2B,EAAE,EAC7B,YAA2B,EAAE,EAC7B;AACA,QAAO,KACL,WACA,QAAQ,EAAE,WAAW,CAAC,aAAa,MAAM,IAAI,WAAW,KAAK,OAAO,CAAC,CAAC,CAAC,EACvE,OAAO,UAAU,EACjB,SAAS,KAAK,OAAO,CAAC,CACvB;;AAIH,eAAsB,uBACpB,cACA,YACA;CACA,MAAM,eAAe,KAAK,YAAY,2BAA2B;CACjE,MAAM,mBAAmB,MAAM,wBAAwB,aAAa;CAEpE,MAAM,kBAA4B;EAChC,GAAG;EACH,GAAG;EACH,WAAW,MAAM,iBAAiB,WAAW,aAAa,UAAU;EACpE,gBAAgB,uBACd,iBAAiB,gBACjB,aAAa,eACd;EACD,SAAS,uBACP,iBAAiB,SACjB,aAAa,QACd;EACD,MAAM,uBAAuB,iBAAiB,MAAM,aAAa,KAAK;EACtE,YAAY,uBACV,iBAAiB,YACjB,aAAa,WACd;EACD,WAAW,uBACT,iBAAiB,WACjB,aAAa,UACd;EACD,QAAQ,kBAAkB,iBAAiB,QAAQ,aAAa,OAAO;EACxE;AACD,OAAM,cAAc,cAAc,gBAAgB;AAClD,QAAO;;;;AC1ET,eAAsB,kCACpB,MACA;CACA,MAAM,EAAE,SAAS,eAAe,gBAAgB,SAAS,kBACvD;AAEF,OAAMC,gBAAc;EAClB;EACA;EACA;EACD,CAAC;AAEF,OAAMC,oBAAkB;EACtB;EACA;EACA;EACD,CAAC;AAEF,OAAMC,kBAAgB;EACpB;EACA;EACA;EACA;EACA;EACD,CAAC;;AAGJ,eAAeF,gBAAc,GAI1B;CACD,MAAM,WAAW;CACjB,MAAM,EAAE,eAAe,eAAe,sBACpC,EAAE,SACF,KAAK,KAAK,EAAE,SAAS,WAAW,CACjC;AACD,KAAI,cAAe;AACnB,YAAW,gBAAgB,SAAS;AACpC,OAAM,6BAA6B,WAAW;;AAGhD,eAAeC,oBAAkB,GAI9B;CACD,MAAM,WAAW,2BAA2B,EAAE;CAC9C,MAAM,EAAE,eAAe,eAAe,sBACpC,EAAE,SACF,KAAK,KAAK,EAAE,SAAS,eAAe,CACrC;AACD,KAAI,cAAe;AACnB,YAAW,gBAAgB,SAAS;AACpC,OAAM,6BAA6B,WAAW;;AAGhD,eAAeC,kBAAgB,GAM5B;CACD,MAAM,WAAW,yBAAyB,EAAE;CAC5C,MAAM,EAAE,eAAe,eAAe,sBACpC,EAAE,SACF,KAAK,KAAK,EAAE,SAAS,aAAa,CACnC;AACD,KAAI,cAAe;AACnB,YAAW,gBAAgB,SAAS;AACpC,OAAM,6BAA6B,WAAW;;;;ACrEhD,eAAsB,qCACpB,MACA;CACA,MAAM,EAAE,SAAS,eAAe,eAAe,gBAAgB,
YAC7D;AAEF,OAAM,cAAc;EAClB;EACA;EACA;EACD,CAAC;AAEF,OAAM,kBAAkB;EACtB;EACA;EACA;EACD,CAAC;AAEF,OAAM,gBAAgB;EACpB;EACA;EACA;EACA;EACA;EACD,CAAC;AAEF,OAAM,mBAAmB;EAAE;EAAS;EAAS,CAAC;AAE9C,OAAM,eAAe;EAAE;EAAS;EAAS,CAAC;;AAG5C,eAAe,cAAc,GAI1B;CACD,MAAM,WAAW;CACjB,MAAM,EAAE,eAAe,eAAe,sBACpC,EAAE,SACF,KAAK,KAAK,EAAE,SAAS,WAAW,CACjC;AACD,KAAI,cAAe;AACnB,YAAW,gBAAgB,SAAS;AACpC,OAAM,6BAA6B,WAAW;;AAGhD,eAAe,kBAAkB,GAI9B;CACD,MAAM,WAAW,8BAA8B,EAAE;CACjD,MAAM,EAAE,eAAe,eAAe,sBACpC,EAAE,SACF,KAAK,KAAK,EAAE,SAAS,eAAe,CACrC;AACD,KAAI,cAAe;AACnB,YAAW,gBAAgB,SAAS;AACpC,OAAM,6BAA6B,WAAW;;AAGhD,eAAe,gBAAgB,GAM5B;CACD,MAAM,WAAW,4BAA4B,EAAE;CAC/C,MAAM,EAAE,eAAe,eAAe,sBACpC,EAAE,SACF,KAAK,KAAK,EAAE,SAAS,aAAa,CACnC;AACD,KAAI,cAAe;AACnB,YAAW,gBAAgB,SAAS;AACpC,OAAM,6BAA6B,WAAW;;AAGhD,eAAe,eAAe,GAA0C;CACtE,MAAM,WAAW,4BAA4B;CAC7C,MAAM,EAAE,eAAe,eAAe,sBACpC,EAAE,SACF,KAAK,KAAK,EAAE,SAAS,YAAY,CAClC;AACD,KAAI,cAAe;AACnB,YAAW,gBAAgB,SAAS;AACpC,OAAM,6BAA6B,WAAW;;AAGhD,eAAe,mBAAmB,GAA0C;CAC1E,MAAM,WAAW,gCAAgC;CACjD,MAAM,EAAE,eAAe,eAAe,sBACpC,EAAE,SACF,KAAK,KAAK,EAAE,SAAS,gBAAgB,CACtC;AACD,KAAI,cAAe;AACnB,YAAW,gBAAgB,SAAS;AACpC,OAAM,6BAA6B,WAAW;;;;AC1FhD,eAAsB,yBAAyB,MAM5C;CACD,MAAM,EACJ,SACA,eACA,eACA,eACA,kBACE;CACJ,MAAM,gBAAgB,UAAU,cAAc;CAC9C,MAAM,gBAAgB,UAAU,cAAc;CAC9C,MAAM,iBAAiB,WAAW,cAAc;CAChD,MAAM,EAAE,WAAW,kBAAkB,qBACnC,SACA,aACD;CACD,MAAM,aAAa,cAAc,kBAAkB,CAAC,SAAS;CAC7D,MAAM,oBAAoB,cAAc,SAAS;CACjD,MAAM,UAAU,KAAK,KAAK,mBAAmB,cAAc;AAC3D,OAAM,uBAAuB,SAAS,mBAAmB,QAAQ;AAEjE,KAAI,kBAAkB,YACpB,OAAM,kCAAkC;EACtC;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACD,CAAC;KAEF,OAAM,qCAAqC;EACzC;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACD,CAAC;AAGJ,MAAK,MAAM,gBAAgB,cACzB,OAAM,0BAA0B;EAC9B;EACA;EACA;EACA;EACA;EACD,CAAC;AAEJ,OAAM,uBACJ,EACE,YAAY,CAAC;EAAE,MAAM;EAAe,IAAI;EAAe,CAAC,EACzD,EACD,WACD;;AAGH,eAAe,0BAA0B,GAMtC;CACD,MAAM,EACJ,SACA,mBACA,cACA,eACA,kBACE;CACJ,MAAM,WAAW;CAEjB,MAAM,EAAE,eAAe,eAAe,sBACpC,SAFe,KAAK,KAAK,mBAAmB,GAAG,aAAa,KAAK,CAIlE;AACD,KAAI,CAAC,cACH,YAAW,gBAAgB,SAAS;CAEtC,MAAM,OAAO,GAAG,cAAc;CAC9B,M
AAM,kBAAkB,KAAK,KAAK,cAAc,cAAc;CAE9D,MAAM,qBAAqB;CAE3B,IAAI,uBAAuB,wBACzB,YACA,mBACD;AAED,KAAI,CAAC,sBAAsB;AACzB,aAAW,gBAAgB,SAAS;AACpC,yBAAuB,wBACrB,YACA,mBACD;;AAGH,KAAI,CAAC,qBACH,OAAM,IAAI,MACR,gDAAgD,aAAa,KAC9D;AAUH,KAAI,CAPsB,WACvB,uBAAuB,CACvB,SAAS,sBACR,kBAAkB,iBAAiB,CAAC,KAAK,MAAM,EAAE,SAAS,CAAC,CAC5D,CACA,MAAM,MAAM,MAAM,KAAK,CAGxB,YAAW,qBAAqB;EAC9B,cAAc,CAAC,KAAK;EACpB;EACD,CAAC;AAOJ,KAAI,CAJkB,qBACnB,aAAa,CACb,KAAK,MAAM,EAAE,SAAS,CAAC,CAEP,SAAS,KAAK,CAC/B,sBAAqB,WAAW,KAAK;AAGvC,OAAM,6BAA6B,WAAW;;AAGhD,SAAS,wBAAwB,YAAwB,MAAc;AACrE,QAAO,WACJ,qBAAqB,GAAG,WAAW,kBAAkB,CACrD,SAAS,MAAM,EAAE,qBAAqB,GAAG,WAAW,oBAAoB,CAAC,CACzE,MAAM,MAAM,EAAE,SAAS,KAAK,KAAK,EAChC,qBAAqB,GAAG,WAAW,uBAAuB,CAC3D,GAAG,EAAE;;;;AClJV,eAAsB,wBAAwB,MAG5B;CAChB,MAAM,EAAE,cAAc,YAAY;CAClC,MAAM,gBAAgB,UAAU,aAAa;CAC7C,MAAM,iBAAiB,WAAW,aAAa;CAC/C,MAAM,gBAAgB,UAAU,aAAa;CAC7C,MAAM,EAAE,WAAW,iBAAiB,qBAClC,SACA,YACD;CACD,MAAM,mBAAmB,aAAa,SAAS;CAC/C,MAAM,aAAa,aAAa,kBAAkB,CAAC,SAAS;CAC5D,MAAM,cAAc,KAAK,KAAK,kBAAkB,cAAc;AAC9D,OAAM,uBAAuB,SAAS,kBAAkB,YAAY;AAGpE,OAAM,0BAA0B,SAAS,aAAa;EACpD;EACA;EACD,CAAC;AACF,OAAM,wBAAwB,SAAS,YAAY;AAGnD,OAAM,wBAAwB,SAAS,aAAa;EAClD;EACA;EACD,CAAC;AAEF,OAAM,uBAAuB;EAAE;EAAS,cAAc;EAAkB,CAAC;AACzE,OAAM,uBACJ,EACE,WAAW,CACT;EACE,MAAM;EACN,IAAI;EACL,CACF,EACF,EACD,WACD;;AAGH,eAAe,0BACb,SACA,SACA,GACA;CAEA,MAAM,EAAE,eAAe,eAAe,sBACpC,SAFe,KAAK,KAAK,SAAS,WAAW,CAI9C;AACD,KAAI,cAAe;AACnB,YAAW,gBAAgB,0BAA0B,EAAE,CAAC;AACxD,OAAM,6BAA6B,WAAW;;AAGhD,eAAe,wBAAwB,SAAkB,SAAiB;CAExE,MAAM,EAAE,eAAe,eAAe,sBACpC,SAFe,KAAK,KAAK,SAAS,SAAS,CAI5C;AACD,KAAI,cAAe;AACnB,YAAW,gBAAgB,yBAAyB,CAAC;AACrD,OAAM,6BAA6B,WAAW;;AAGhD,eAAe,wBACb,SACA,SACA,GACA;CAGA,MAAM,SAAS,sBAAsB,SADlB,KAAK,KAAK,SAAS,YAAY,CACO;AACzD,KAAI,CAAC,OAAO,cACV,QAAO,WAAW,gBAAgB,6BAA6B,EAAE,CAAC;CAKpE,MAAM,YAAY,sBAAsB,SADlB,KAAK,KAAK,SAAS,eAAe,CACO;AAC/D,KAAI,CAAC,UAAU,eAAe;AAC5B,YAAU,WAAW,gBAAgB,gCAAgC,EAAE,CAAC;AACxE,QAAM,6BAA6B,UAAU,WAAW;;;AAI5D,eAAsB,uBAAuB,MAG1C;CACD,MAAM,EAAE,SAAS,iBAAiB;CAClC,MAAM,EAAE,eAAe,sBACrB,SACA,KAAK,KAAK
,cAAc,WAAW,CACpC;CACD,MAAM,sBAAsB,KAC1B,WAAW,uBAAuB,EAClC,KAAK,sBACH,kBAAkB,oBAAoB,EAAE,SAAS,CAClD,EACD,OAAO,SAAS,CACjB;CAED,MAAM,qBAAqB,KACzB,QAAQ,oBAAoB,aAAa,CAAC,0BAA0B,EACpE,QAAQ,eAAe,WAAW,aAAa,KAAK,WAAW,EAC/D,UAAU,eAAe,WAAW,aAAa,CAAC,EAClD,KAAK,eACH,WACG,YAAY,CACZ,MAAM,MAAM,EAAE,cAAc,EAAE,SAAS,CAAC,SAAS,eAAe,CAAC,CACrE,EACD,OAAO,SAAS,EAChB,KAAK,sBAAsB;EACzB,MAAM,iBAAiB,gBAAgB;EACvC,aAAa,iBACV,eAAe,CACf,cAAc,CACd,aAAa;EACjB,EAAE,EACH,QAAQ,EAAE,WAAW,CAAC,oBAAoB,SAAS,KAAK,CAAC,EACzD,KAAK,EAAE,MAAM,mBAAmB;EAC9B,iBAAiB;EACjB,iBAAiB,KAAK,YAAY;EACnC,EAAE,CACJ;AACD,YAAW,sBAAsB,mBAAmB;AACpD,OAAM,6BAA6B,WAAW"}