@elaraai/east 0.0.1-beta.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (251) hide show
  1. package/LICENSE.md +682 -0
  2. package/README.md +276 -0
  3. package/dist/src/analyze.d.ts +95 -0
  4. package/dist/src/analyze.d.ts.map +1 -0
  5. package/dist/src/analyze.js +1110 -0
  6. package/dist/src/analyze.js.map +1 -0
  7. package/dist/src/ast.d.ts +263 -0
  8. package/dist/src/ast.d.ts.map +1 -0
  9. package/dist/src/ast.js +151 -0
  10. package/dist/src/ast.js.map +1 -0
  11. package/dist/src/ast_to_ir.d.ts +24 -0
  12. package/dist/src/ast_to_ir.d.ts.map +1 -0
  13. package/dist/src/ast_to_ir.js +834 -0
  14. package/dist/src/ast_to_ir.js.map +1 -0
  15. package/dist/src/builtins.d.ts +18 -0
  16. package/dist/src/builtins.d.ts.map +1 -0
  17. package/dist/src/builtins.js +1105 -0
  18. package/dist/src/builtins.js.map +1 -0
  19. package/dist/src/comparison.d.ts +28 -0
  20. package/dist/src/comparison.d.ts.map +1 -0
  21. package/dist/src/comparison.js +1017 -0
  22. package/dist/src/comparison.js.map +1 -0
  23. package/dist/src/compile.d.ts +22 -0
  24. package/dist/src/compile.d.ts.map +1 -0
  25. package/dist/src/compile.js +3260 -0
  26. package/dist/src/compile.js.map +1 -0
  27. package/dist/src/containers/ref.d.ts +106 -0
  28. package/dist/src/containers/ref.d.ts.map +1 -0
  29. package/dist/src/containers/ref.js +100 -0
  30. package/dist/src/containers/ref.js.map +1 -0
  31. package/dist/src/containers/sortedmap.d.ts +165 -0
  32. package/dist/src/containers/sortedmap.d.ts.map +1 -0
  33. package/dist/src/containers/sortedmap.js +237 -0
  34. package/dist/src/containers/sortedmap.js.map +1 -0
  35. package/dist/src/containers/sortedset.d.ts +185 -0
  36. package/dist/src/containers/sortedset.d.ts.map +1 -0
  37. package/dist/src/containers/sortedset.js +312 -0
  38. package/dist/src/containers/sortedset.js.map +1 -0
  39. package/dist/src/containers/variant.d.ts +131 -0
  40. package/dist/src/containers/variant.d.ts.map +1 -0
  41. package/dist/src/containers/variant.js +68 -0
  42. package/dist/src/containers/variant.js.map +1 -0
  43. package/dist/src/datetime_format/parse.d.ts +50 -0
  44. package/dist/src/datetime_format/parse.d.ts.map +1 -0
  45. package/dist/src/datetime_format/parse.js +908 -0
  46. package/dist/src/datetime_format/parse.js.map +1 -0
  47. package/dist/src/datetime_format/print.d.ts +35 -0
  48. package/dist/src/datetime_format/print.d.ts.map +1 -0
  49. package/dist/src/datetime_format/print.js +157 -0
  50. package/dist/src/datetime_format/print.js.map +1 -0
  51. package/dist/src/datetime_format/tokenize.d.ts +76 -0
  52. package/dist/src/datetime_format/tokenize.d.ts.map +1 -0
  53. package/dist/src/datetime_format/tokenize.js +271 -0
  54. package/dist/src/datetime_format/tokenize.js.map +1 -0
  55. package/dist/src/datetime_format/types.d.ts +99 -0
  56. package/dist/src/datetime_format/types.d.ts.map +1 -0
  57. package/dist/src/datetime_format/types.js +103 -0
  58. package/dist/src/datetime_format/types.js.map +1 -0
  59. package/dist/src/datetime_format/validate.d.ts +51 -0
  60. package/dist/src/datetime_format/validate.d.ts.map +1 -0
  61. package/dist/src/datetime_format/validate.js +208 -0
  62. package/dist/src/datetime_format/validate.js.map +1 -0
  63. package/dist/src/default.d.ts +21 -0
  64. package/dist/src/default.d.ts.map +1 -0
  65. package/dist/src/default.js +82 -0
  66. package/dist/src/default.js.map +1 -0
  67. package/dist/src/eastir.d.ts +33 -0
  68. package/dist/src/eastir.d.ts.map +1 -0
  69. package/dist/src/eastir.js +92 -0
  70. package/dist/src/eastir.js.map +1 -0
  71. package/dist/src/error.d.ts +13 -0
  72. package/dist/src/error.d.ts.map +1 -0
  73. package/dist/src/error.js +8 -0
  74. package/dist/src/error.js.map +1 -0
  75. package/dist/src/expr/array.d.ts +1711 -0
  76. package/dist/src/expr/array.d.ts.map +1 -0
  77. package/dist/src/expr/array.js +1805 -0
  78. package/dist/src/expr/array.js.map +1 -0
  79. package/dist/src/expr/ast.d.ts +17 -0
  80. package/dist/src/expr/ast.d.ts.map +1 -0
  81. package/dist/src/expr/ast.js +302 -0
  82. package/dist/src/expr/ast.js.map +1 -0
  83. package/dist/src/expr/blob.d.ts +141 -0
  84. package/dist/src/expr/blob.d.ts.map +1 -0
  85. package/dist/src/expr/blob.js +198 -0
  86. package/dist/src/expr/blob.js.map +1 -0
  87. package/dist/src/expr/block.d.ts +201 -0
  88. package/dist/src/expr/block.d.ts.map +1 -0
  89. package/dist/src/expr/block.js +1505 -0
  90. package/dist/src/expr/block.js.map +1 -0
  91. package/dist/src/expr/boolean.d.ts +207 -0
  92. package/dist/src/expr/boolean.d.ts.map +1 -0
  93. package/dist/src/expr/boolean.js +261 -0
  94. package/dist/src/expr/boolean.js.map +1 -0
  95. package/dist/src/expr/datetime.d.ts +544 -0
  96. package/dist/src/expr/datetime.d.ts.map +1 -0
  97. package/dist/src/expr/datetime.js +980 -0
  98. package/dist/src/expr/datetime.js.map +1 -0
  99. package/dist/src/expr/dict.d.ts +1242 -0
  100. package/dist/src/expr/dict.d.ts.map +1 -0
  101. package/dist/src/expr/dict.js +1492 -0
  102. package/dist/src/expr/dict.js.map +1 -0
  103. package/dist/src/expr/expr.d.ts +95 -0
  104. package/dist/src/expr/expr.d.ts.map +1 -0
  105. package/dist/src/expr/expr.js +171 -0
  106. package/dist/src/expr/expr.js.map +1 -0
  107. package/dist/src/expr/float.d.ts +357 -0
  108. package/dist/src/expr/float.d.ts.map +1 -0
  109. package/dist/src/expr/float.js +637 -0
  110. package/dist/src/expr/float.js.map +1 -0
  111. package/dist/src/expr/function.d.ts +46 -0
  112. package/dist/src/expr/function.d.ts.map +1 -0
  113. package/dist/src/expr/function.js +58 -0
  114. package/dist/src/expr/function.js.map +1 -0
  115. package/dist/src/expr/index.d.ts +450 -0
  116. package/dist/src/expr/index.d.ts.map +1 -0
  117. package/dist/src/expr/index.js +423 -0
  118. package/dist/src/expr/index.js.map +1 -0
  119. package/dist/src/expr/integer.d.ts +256 -0
  120. package/dist/src/expr/integer.d.ts.map +1 -0
  121. package/dist/src/expr/integer.js +311 -0
  122. package/dist/src/expr/integer.js.map +1 -0
  123. package/dist/src/expr/libs/array.d.ts +106 -0
  124. package/dist/src/expr/libs/array.d.ts.map +1 -0
  125. package/dist/src/expr/libs/array.js +140 -0
  126. package/dist/src/expr/libs/array.js.map +1 -0
  127. package/dist/src/expr/libs/blob.d.ts +42 -0
  128. package/dist/src/expr/libs/blob.d.ts.map +1 -0
  129. package/dist/src/expr/libs/blob.js +70 -0
  130. package/dist/src/expr/libs/blob.js.map +1 -0
  131. package/dist/src/expr/libs/datetime.d.ts +479 -0
  132. package/dist/src/expr/libs/datetime.d.ts.map +1 -0
  133. package/dist/src/expr/libs/datetime.js +624 -0
  134. package/dist/src/expr/libs/datetime.js.map +1 -0
  135. package/dist/src/expr/libs/dict.d.ts +66 -0
  136. package/dist/src/expr/libs/dict.d.ts.map +1 -0
  137. package/dist/src/expr/libs/dict.js +77 -0
  138. package/dist/src/expr/libs/dict.js.map +1 -0
  139. package/dist/src/expr/libs/float.d.ts +299 -0
  140. package/dist/src/expr/libs/float.d.ts.map +1 -0
  141. package/dist/src/expr/libs/float.js +564 -0
  142. package/dist/src/expr/libs/float.js.map +1 -0
  143. package/dist/src/expr/libs/integer.d.ts +228 -0
  144. package/dist/src/expr/libs/integer.d.ts.map +1 -0
  145. package/dist/src/expr/libs/integer.js +398 -0
  146. package/dist/src/expr/libs/integer.js.map +1 -0
  147. package/dist/src/expr/libs/set.d.ts +59 -0
  148. package/dist/src/expr/libs/set.d.ts.map +1 -0
  149. package/dist/src/expr/libs/set.js +69 -0
  150. package/dist/src/expr/libs/set.js.map +1 -0
  151. package/dist/src/expr/libs/string.d.ts +71 -0
  152. package/dist/src/expr/libs/string.d.ts.map +1 -0
  153. package/dist/src/expr/libs/string.js +75 -0
  154. package/dist/src/expr/libs/string.js.map +1 -0
  155. package/dist/src/expr/never.d.ts +15 -0
  156. package/dist/src/expr/never.d.ts.map +1 -0
  157. package/dist/src/expr/never.js +12 -0
  158. package/dist/src/expr/never.js.map +1 -0
  159. package/dist/src/expr/null.d.ts +15 -0
  160. package/dist/src/expr/null.d.ts.map +1 -0
  161. package/dist/src/expr/null.js +12 -0
  162. package/dist/src/expr/null.js.map +1 -0
  163. package/dist/src/expr/ref.d.ts +103 -0
  164. package/dist/src/expr/ref.d.ts.map +1 -0
  165. package/dist/src/expr/ref.js +131 -0
  166. package/dist/src/expr/ref.js.map +1 -0
  167. package/dist/src/expr/regex_validation.d.ts +25 -0
  168. package/dist/src/expr/regex_validation.d.ts.map +1 -0
  169. package/dist/src/expr/regex_validation.js +130 -0
  170. package/dist/src/expr/regex_validation.js.map +1 -0
  171. package/dist/src/expr/set.d.ts +1071 -0
  172. package/dist/src/expr/set.d.ts.map +1 -0
  173. package/dist/src/expr/set.js +1137 -0
  174. package/dist/src/expr/set.js.map +1 -0
  175. package/dist/src/expr/string.d.ts +414 -0
  176. package/dist/src/expr/string.d.ts.map +1 -0
  177. package/dist/src/expr/string.js +683 -0
  178. package/dist/src/expr/string.js.map +1 -0
  179. package/dist/src/expr/struct.d.ts +48 -0
  180. package/dist/src/expr/struct.d.ts.map +1 -0
  181. package/dist/src/expr/struct.js +65 -0
  182. package/dist/src/expr/struct.js.map +1 -0
  183. package/dist/src/expr/types.d.ts +68 -0
  184. package/dist/src/expr/types.d.ts.map +1 -0
  185. package/dist/src/expr/types.js +6 -0
  186. package/dist/src/expr/types.js.map +1 -0
  187. package/dist/src/expr/variant.d.ts +137 -0
  188. package/dist/src/expr/variant.d.ts.map +1 -0
  189. package/dist/src/expr/variant.js +105 -0
  190. package/dist/src/expr/variant.js.map +1 -0
  191. package/dist/src/fuzz.d.ts +80 -0
  192. package/dist/src/fuzz.d.ts.map +1 -0
  193. package/dist/src/fuzz.js +300 -0
  194. package/dist/src/fuzz.js.map +1 -0
  195. package/dist/src/index.d.ts +21 -0
  196. package/dist/src/index.d.ts.map +1 -0
  197. package/dist/src/index.js +21 -0
  198. package/dist/src/index.js.map +1 -0
  199. package/dist/src/internal.d.ts +36 -0
  200. package/dist/src/internal.d.ts.map +1 -0
  201. package/dist/src/internal.js +11 -0
  202. package/dist/src/internal.js.map +1 -0
  203. package/dist/src/ir.d.ts +1571 -0
  204. package/dist/src/ir.d.ts.map +1 -0
  205. package/dist/src/ir.js +56 -0
  206. package/dist/src/ir.js.map +1 -0
  207. package/dist/src/location.d.ts +48 -0
  208. package/dist/src/location.d.ts.map +1 -0
  209. package/dist/src/location.js +62 -0
  210. package/dist/src/location.js.map +1 -0
  211. package/dist/src/platform.d.ts +21 -0
  212. package/dist/src/platform.d.ts.map +1 -0
  213. package/dist/src/platform.js +8 -0
  214. package/dist/src/platform.js.map +1 -0
  215. package/dist/src/serialization/beast.d.ts +39 -0
  216. package/dist/src/serialization/beast.d.ts.map +1 -0
  217. package/dist/src/serialization/beast.js +555 -0
  218. package/dist/src/serialization/beast.js.map +1 -0
  219. package/dist/src/serialization/beast2-stream.d.ts +38 -0
  220. package/dist/src/serialization/beast2-stream.d.ts.map +1 -0
  221. package/dist/src/serialization/beast2-stream.js +665 -0
  222. package/dist/src/serialization/beast2-stream.js.map +1 -0
  223. package/dist/src/serialization/beast2.d.ts +41 -0
  224. package/dist/src/serialization/beast2.d.ts.map +1 -0
  225. package/dist/src/serialization/beast2.js +489 -0
  226. package/dist/src/serialization/beast2.js.map +1 -0
  227. package/dist/src/serialization/binary-utils.d.ts +151 -0
  228. package/dist/src/serialization/binary-utils.d.ts.map +1 -0
  229. package/dist/src/serialization/binary-utils.js +929 -0
  230. package/dist/src/serialization/binary-utils.js.map +1 -0
  231. package/dist/src/serialization/east.d.ts +84 -0
  232. package/dist/src/serialization/east.d.ts.map +1 -0
  233. package/dist/src/serialization/east.js +1802 -0
  234. package/dist/src/serialization/east.js.map +1 -0
  235. package/dist/src/serialization/index.d.ts +11 -0
  236. package/dist/src/serialization/index.d.ts.map +1 -0
  237. package/dist/src/serialization/index.js +12 -0
  238. package/dist/src/serialization/index.js.map +1 -0
  239. package/dist/src/serialization/json.d.ts +36 -0
  240. package/dist/src/serialization/json.d.ts.map +1 -0
  241. package/dist/src/serialization/json.js +849 -0
  242. package/dist/src/serialization/json.js.map +1 -0
  243. package/dist/src/type_of_type.d.ts +115 -0
  244. package/dist/src/type_of_type.d.ts.map +1 -0
  245. package/dist/src/type_of_type.js +362 -0
  246. package/dist/src/type_of_type.js.map +1 -0
  247. package/dist/src/types.d.ts +648 -0
  248. package/dist/src/types.d.ts.map +1 -0
  249. package/dist/src/types.js +1631 -0
  250. package/dist/src/types.js.map +1 -0
  251. package/package.json +87 -0
@@ -0,0 +1,665 @@
1
+ /**
2
+ * Copyright (c) 2025 Elara AI Pty Ltd
3
+ * Dual-licensed under AGPL-3.0 and commercial license. See LICENSE for details.
4
+ */
5
+ import {} from "../types.js";
6
+ import { isVariant, variant } from "../containers/variant.js";
7
+ import { BufferWriter, StreamBufferReader } from "./binary-utils.js";
8
+ import { encodeBeast2ValueToBufferFor, MAGIC_BYTES } from "./beast2.js";
9
+ import { EastTypeValueType, toEastTypeValue } from "../type_of_type.js";
10
+ import { equalFor } from "../comparison.js";
11
+ import { printFor } from "./east.js";
12
+ import { ref } from "../containers/ref.js";
13
// Pretty-printer for EastTypeValue schemas; used to build readable
// "type mismatch" error messages in decodeBeast2FromStreamFor.
const printTypeValue = printFor(EastTypeValueType);
// Structural equality on EastTypeValue schemas; used to verify that a
// stream's embedded type matches the caller's expected type.
const isTypeValueEqual = equalFor(EastTypeValueType);
// Chunk size for streaming (64KB - good balance of memory vs throughput)
const CHUNK_SIZE = 64 * 1024;
17
/**
 * Determines if a type can be encoded synchronously without needing to yield.
 * Sync types are small and bounded - they can't cause excessive buffer growth,
 * so the streaming encoder can delegate them to the plain buffer encoder.
 */
function isSyncType(type) {
    const kind = type.type;
    // Fixed-size scalar types can never grow the buffer meaningfully.
    if (kind === "Never" || kind === "Null" || kind === "Boolean" ||
        kind === "Integer" || kind === "Float" || kind === "DateTime") {
        return true;
    }
    // Product/sum types are sync exactly when every member is sync.
    if (kind === "Struct" || kind === "Variant") {
        return type.value.every((member) => isSyncType(member.type));
    }
    // Encoding a Function throws anyway, so there is no need to yield.
    if (kind === "Function") {
        return true;
    }
    // String/Blob/Ref/Array/Set/Dict can be arbitrarily large, Recursive may
    // nest unboundedly, and anything unknown is conservatively async.
    return false;
}
52
/**
 * Build a streaming Beast v2 value encoder for `type`.
 *
 * Factory pattern: the type is analyzed once and a generator function
 * `(value, writer, ctx) => Generator<Uint8Array>` is returned. The generator
 * writes into `writer` and yields completed chunks (via `writer.pop()`)
 * whenever the buffered size reaches CHUNK_SIZE, so callers can apply
 * backpressure between chunks.
 *
 * `typeCtx` is a stack of encoders currently under construction; "Recursive"
 * types resolve against it by depth-from-the-end. For recursive container
 * types the encoder is pushed onto the stack BEFORE its element encoder is
 * built (late binding via the `valueGen` variable), then popped — this
 * ordering is what ties the recursive knot.
 */
function encodeBeast2ValueToStreamFor(type, typeCtx = []) {
    // Convert EastType to EastTypeValue if necessary
    if (!isVariant(type)) {
        type = toEastTypeValue(type);
    }
    // For fully sync types, use the sync encoder directly
    // (wrapped in a trivial generator so all encoders share one shape).
    if (isSyncType(type)) {
        const syncEncoder = encodeBeast2ValueToBufferFor(type);
        return function* (value, writer, ctx) {
            syncEncoder(value, writer, ctx);
        };
    }
    // Async types - need generator logic
    if (type.type === "String") {
        return function* (value, writer, _ctx) {
            writer.writeStringUtf8Varint(value);
            if (writer.size >= CHUNK_SIZE) {
                yield writer.pop();
            }
        };
    }
    else if (type.type === "Blob") {
        return function* (value, writer, _ctx) {
            // Write length prefix
            writer.writeVarint(value.length);
            // Check if we should chunk the blob
            if (value.length > CHUNK_SIZE) {
                // Large blob - flush buffer first, then send blob in chunks
                if (writer.size > 0) {
                    yield writer.pop();
                }
                let offset = 0;
                while (offset < value.length) {
                    const chunkSize = Math.min(CHUNK_SIZE, value.length - offset);
                    const chunk = value.subarray(offset, offset + chunkSize);
                    writer.writeBytes(chunk);
                    yield writer.pop();
                    offset += chunkSize;
                }
            }
            else {
                // Small blob - write it all
                writer.writeBytes(value);
                if (writer.size >= CHUNK_SIZE) {
                    yield writer.pop();
                }
            }
        };
    }
    else if (type.type === "Ref") {
        // Branch on whether value type is sync
        if (isSyncType(type.value)) {
            const syncValueEncoder = encodeBeast2ValueToBufferFor(type.value);
            return function* (value, writer, ctx = { refs: new Map() }) {
                // Check for backreference (shared identity already encoded):
                // emit the distance back to the original occurrence.
                if (ctx.refs.has(value)) {
                    const offset = ctx.refs.get(value);
                    writer.writeVarint(writer.currentOffset - offset);
                    return;
                }
                // Write inline marker and register
                writer.writeVarint(0);
                ctx.refs.set(value, writer.currentOffset);
                syncValueEncoder(value.value, writer, ctx);
            };
        }
        else {
            // Use late binding for async value type
            let valueGen;
            const ret = function* (value, writer, ctx = { refs: new Map() }) {
                // Check for backreference
                if (ctx.refs.has(value)) {
                    const offset = ctx.refs.get(value);
                    writer.writeVarint(writer.currentOffset - offset);
                    return;
                }
                // Write inline marker and register
                writer.writeVarint(0);
                ctx.refs.set(value, writer.currentOffset);
                yield* valueGen(value.value, writer, ctx);
            };
            typeCtx.push(ret);
            valueGen = encodeBeast2ValueToStreamFor(type.value, typeCtx);
            typeCtx.pop();
            return ret;
        }
    }
    else if (type.type === "Array") {
        // Branch on whether value type is sync
        if (isSyncType(type.value)) {
            const syncValueEncoder = encodeBeast2ValueToBufferFor(type.value);
            return function* (value, writer, ctx = { refs: new Map() }) {
                // Check for backreference
                if (ctx.refs.has(value)) {
                    const offset = ctx.refs.get(value);
                    writer.writeVarint(writer.currentOffset - offset);
                    return;
                }
                // Write inline marker and register
                writer.writeVarint(0);
                ctx.refs.set(value, writer.currentOffset);
                // Encode length
                writer.writeVarint(value.length);
                for (const item of value) {
                    syncValueEncoder(item, writer, ctx);
                    // Yield a chunk between items once the buffer is full.
                    if (writer.size >= CHUNK_SIZE) {
                        yield writer.pop();
                    }
                }
            };
        }
        else {
            // Use late binding for async value type
            let valueGen;
            const ret = function* (value, writer, ctx = { refs: new Map() }) {
                // Check for backreference
                if (ctx.refs.has(value)) {
                    const offset = ctx.refs.get(value);
                    writer.writeVarint(writer.currentOffset - offset);
                    return;
                }
                // Write inline marker and register
                writer.writeVarint(0);
                ctx.refs.set(value, writer.currentOffset);
                // Encode length
                writer.writeVarint(value.length);
                for (const item of value) {
                    yield* valueGen(item, writer, ctx);
                    if (writer.size >= CHUNK_SIZE) {
                        yield writer.pop();
                    }
                }
            };
            typeCtx.push(ret);
            valueGen = encodeBeast2ValueToStreamFor(type.value, typeCtx);
            typeCtx.pop();
            return ret;
        }
    }
    else if (type.type === "Set") {
        // Keys are assumed small - always use sync
        const syncKeyEncoder = encodeBeast2ValueToBufferFor(type.value);
        return function* (value, writer, ctx = { refs: new Map() }) {
            // Check for backreference
            if (ctx.refs.has(value)) {
                const offset = ctx.refs.get(value);
                writer.writeVarint(writer.currentOffset - offset);
                return;
            }
            // Write inline marker and register
            writer.writeVarint(0);
            ctx.refs.set(value, writer.currentOffset);
            // Encode length
            writer.writeVarint(value.size);
            for (const key of value) {
                syncKeyEncoder(key, writer, ctx);
                if (writer.size >= CHUNK_SIZE) {
                    yield writer.pop();
                }
            }
        };
    }
    else if (type.type === "Dict") {
        // Keys are assumed small - always use sync. Check value type.
        const syncKeyEncoder = encodeBeast2ValueToBufferFor(type.value.key);
        if (isSyncType(type.value.value)) {
            const syncValueEncoder = encodeBeast2ValueToBufferFor(type.value.value);
            return function* (value, writer, ctx = { refs: new Map() }) {
                // Check for backreference
                if (ctx.refs.has(value)) {
                    const offset = ctx.refs.get(value);
                    writer.writeVarint(writer.currentOffset - offset);
                    return;
                }
                // Write inline marker and register
                writer.writeVarint(0);
                ctx.refs.set(value, writer.currentOffset);
                // Encode length
                writer.writeVarint(value.size);
                for (const [k, v] of value) {
                    syncKeyEncoder(k, writer, ctx);
                    syncValueEncoder(v, writer, ctx);
                    if (writer.size >= CHUNK_SIZE) {
                        yield writer.pop();
                    }
                }
            };
        }
        else {
            // Use late binding for async value type
            let valueGen;
            const ret = function* (value, writer, ctx = { refs: new Map() }) {
                // Check for backreference
                if (ctx.refs.has(value)) {
                    const offset = ctx.refs.get(value);
                    writer.writeVarint(writer.currentOffset - offset);
                    return;
                }
                // Write inline marker and register
                writer.writeVarint(0);
                ctx.refs.set(value, writer.currentOffset);
                // Encode length
                writer.writeVarint(value.size);
                for (const [k, v] of value) {
                    syncKeyEncoder(k, writer, ctx);
                    yield* valueGen(v, writer, ctx);
                    if (writer.size >= CHUNK_SIZE) {
                        yield writer.pop();
                    }
                }
            };
            typeCtx.push(ret);
            valueGen = encodeBeast2ValueToStreamFor(type.value.value, typeCtx);
            typeCtx.pop();
            return ret;
        }
    }
    else if (type.type === "Struct") {
        // Annotate each field with sync flag and appropriate encoder
        const fieldInfo = [];
        const ret = function* (value, writer, _ctx) {
            // Struct is immutable - no backreference checking needed
            // Encode fields directly
            // NOTE(review): field encoders are invoked without ctx, so
            // backreferences do not cross struct boundaries — this appears
            // symmetric with the decoder; confirm against the format spec.
            for (const { name, isSync, encoder } of fieldInfo) {
                if (isSync) {
                    encoder(value[name], writer);
                }
                else {
                    yield* encoder(value[name], writer);
                }
            }
        };
        typeCtx.push(ret);
        for (const { name, type: fieldType } of type.value)
            fieldInfo.push({
                name,
                isSync: isSyncType(fieldType),
                encoder: isSyncType(fieldType)
                    ? encodeBeast2ValueToBufferFor(fieldType)
                    : encodeBeast2ValueToStreamFor(fieldType, typeCtx),
            });
        typeCtx.pop();
        return ret;
    }
    else if (type.type === "Variant") {
        // Annotate each case with sync flag and appropriate encoder
        const caseInfo = {};
        const ret = function* (value, writer, _ctx) {
            // Variant is immutable - no backreference checking needed
            // Encode tag and value directly
            const tag = value.type;
            const { tagIndex, isSync, encoder } = caseInfo[tag]; // Assume valid input
            writer.writeVarint(tagIndex);
            if (isSync) {
                encoder(value.value, writer);
            }
            else {
                yield* encoder(value.value, writer);
            }
        };
        typeCtx.push(ret);
        let i = 0;
        // Tag indices follow declaration order of the variant cases.
        for (const { name, type: caseType } of type.value) {
            caseInfo[name] = {
                tagIndex: i,
                isSync: isSyncType(caseType),
                encoder: isSyncType(caseType)
                    ? encodeBeast2ValueToBufferFor(caseType)
                    : encodeBeast2ValueToStreamFor(caseType, typeCtx),
            };
            i += 1;
        }
        typeCtx.pop();
        return ret;
    }
    else if (type.type === "Recursive") {
        // Type stack lookup: index from the end
        const targetEncoder = typeCtx[typeCtx.length - Number(type.value)];
        if (!targetEncoder) {
            throw new Error(`Recursive type depth ${type.value} exceeds type context stack size ${typeCtx.length}`);
        }
        return targetEncoder;
    }
    else {
        // Should never reach here - all types handled above
        throw new Error(`Unhandled type: ${type.type}`);
    }
}
340
// Synchronous encoder for the type schema that is written into the stream
// header by encodeBeast2ToStreamFor (schemas are small, so no streaming).
const typeEncoder = encodeBeast2ValueToBufferFor(EastTypeValueType);
341
/**
 * Create a streaming encoder for a given Beast v2 type.
 *
 * Factory pattern: the type is analyzed once; the returned function maps a
 * value to a ReadableStream of byte chunks containing the magic bytes, the
 * encoded type schema, and finally the streamed value payload.
 *
 * Backpressure: `pull()` advances the value generator until it yields a
 * chunk (enqueue one chunk per pull) or finishes (flush + close). `yield*`
 * delegation can surface `undefined` steps, which are skipped by looping.
 */
export function encodeBeast2ToStreamFor(type) {
    // Convert EastType to EastTypeValue if necessary
    if (!isVariant(type)) {
        type = toEastTypeValue(type);
    }
    const valueGen = encodeBeast2ValueToStreamFor(type);
    return (value) => {
        // Per-stream state, initialized in start() and shared with pull().
        let writer;
        let generator;
        let ctx;
        return new ReadableStream({
            start(controller) {
                // Initialize writer and write header
                writer = new BufferWriter();
                writer.writeBytes(MAGIC_BYTES);
                // Write type schema
                typeEncoder(type, writer, { refs: new Map() });
                // Check if header exceeded chunk size (rare)
                if (writer.size >= CHUNK_SIZE) {
                    controller.enqueue(writer.pop());
                }
                // Create context for value encoding
                ctx = { refs: new Map() };
                // Create value generator (lazy: nothing encoded until pull)
                generator = valueGen(value, writer, ctx);
            },
            pull(controller) {
                while (true) {
                    const { done, value: chunk } = generator.next();
                    if (done) {
                        // Flush any remaining data
                        if (writer.size > 0) {
                            controller.enqueue(writer.pop());
                        }
                        controller.close();
                        return;
                    }
                    if (chunk !== undefined) {
                        controller.enqueue(chunk);
                        return; // Backpressure - wait for next pull()
                    }
                    // chunk is void from yield* delegation - keep looping
                }
            },
        });
    };
}
388
/**
 * Create a streaming decoder function for a given Beast v2 type.
 *
 * Factory pattern: analyzes the type once, returns an (often async) decoder
 * `(reader, ctx) => value`. `ctx.refs` maps stream offsets to already-decoded
 * shared values so backreferences (non-zero varint markers on Ref/Array/Set/
 * Dict) resolve to the same object identity.
 *
 * `typeCtx` is a stack of decoders under construction; "Recursive" types
 * resolve against it by depth-from-the-end. Container decoders are pushed
 * BEFORE building their element decoder (late binding) and popped after.
 *
 * Fixes vs. previous revision: removed a stray empty statement after the
 * Struct field loop and declared the never-reassigned decoder tables with
 * `const` instead of `let`.
 */
function decodeBeast2ValueFromStreamFor(type, typeCtx = []) {
    if (type.type === "Never") {
        return (_reader, _ctx) => { throw new Error(`Cannot decode Never type`); };
    }
    else if (type.type === "Null") {
        return (_reader, _ctx) => null;
    }
    else if (type.type === "Boolean") {
        return async (reader, _ctx) => {
            const byte = await reader.readByte();
            return byte !== 0;
        };
    }
    else if (type.type === "Integer") {
        return (reader, _ctx) => reader.readZigzag();
    }
    else if (type.type === "Float") {
        return (reader, _ctx) => reader.readFloat64LE();
    }
    else if (type.type === "String") {
        return (reader, _ctx) => reader.readStringUtf8Varint();
    }
    else if (type.type === "DateTime") {
        return (reader, _ctx) => {
            // readZigzag may complete synchronously or return a Promise;
            // avoid an unnecessary microtask in the sync case.
            const millis = reader.readZigzag();
            if (millis instanceof Promise) {
                return millis.then(m => new Date(Number(m)));
            }
            return new Date(Number(millis));
        };
    }
    else if (type.type === "Blob") {
        return async (reader, _ctx) => {
            const length = await reader.readVarint();
            const blob = new Uint8Array(length);
            await reader.readBytes(blob);
            return blob;
        };
    }
    else if (type.type === "Ref") {
        // Use late binding for element decoder
        let elemDecoder;
        const ret = async (reader, ctx = { refs: new Map() }) => {
            const startOffset = reader.position;
            const refOrZero = await reader.readVarint();
            // Check if this is a backreference
            if (refOrZero > 0) {
                const targetOffset = startOffset - refOrZero;
                if (!ctx.refs.has(targetOffset)) {
                    throw new Error(`Undefined backreference at offset ${startOffset}, target ${targetOffset}`);
                }
                return ctx.refs.get(targetOffset);
            }
            // Inline ref: register BEFORE decoding the payload so cycles
            // through this ref resolve to the same object.
            const result = ref(undefined);
            ctx.refs.set(startOffset, result);
            result.value = await elemDecoder(reader, ctx);
            return result;
        };
        typeCtx.push(ret);
        elemDecoder = decodeBeast2ValueFromStreamFor(type.value, typeCtx);
        typeCtx.pop();
        return ret;
    }
    else if (type.type === "Array") {
        // Use late binding for element decoder
        let elemDecoder;
        const ret = async (reader, ctx = { refs: new Map() }) => {
            const startOffset = reader.position;
            const refOrZero = await reader.readVarint();
            // Check if this is a backreference
            if (refOrZero > 0) {
                const targetOffset = startOffset - refOrZero;
                if (!ctx.refs.has(targetOffset)) {
                    throw new Error(`Undefined backreference at offset ${startOffset}, target ${targetOffset}`);
                }
                return ctx.refs.get(targetOffset);
            }
            // Inline array (registered before elements for cyclic data)
            const result = [];
            ctx.refs.set(startOffset, result);
            const length = await reader.readVarint();
            for (let i = 0; i < length; i++) {
                result.push(await elemDecoder(reader, ctx));
            }
            return result;
        };
        typeCtx.push(ret);
        elemDecoder = decodeBeast2ValueFromStreamFor(type.value, typeCtx);
        typeCtx.pop();
        return ret;
    }
    else if (type.type === "Set") {
        // Keys are standalone - no need for late binding
        const keyDecoder = decodeBeast2ValueFromStreamFor(type.value, []);
        return async (reader, ctx = { refs: new Map() }) => {
            const startOffset = reader.position;
            const refOrZero = await reader.readVarint();
            // Check if this is a backreference
            if (refOrZero > 0) {
                const targetOffset = startOffset - refOrZero;
                if (!ctx.refs.has(targetOffset)) {
                    throw new Error(`Undefined backreference at offset ${startOffset}, target ${targetOffset}`);
                }
                return ctx.refs.get(targetOffset);
            }
            // Inline set
            const result = new Set();
            ctx.refs.set(startOffset, result);
            const length = await reader.readVarint();
            for (let i = 0; i < length; i++) {
                // Keys deliberately get a fresh default ctx (no cross-key refs).
                result.add(await keyDecoder(reader));
            }
            return result;
        };
    }
    else if (type.type === "Dict") {
        // Keys are standalone, values use late binding
        const keyDecoder = decodeBeast2ValueFromStreamFor(type.value.key, []);
        let valueDecoder;
        const ret = async (reader, ctx = { refs: new Map() }) => {
            const startOffset = reader.position;
            const refOrZero = await reader.readVarint();
            // Check if this is a backreference
            if (refOrZero > 0) {
                const targetOffset = startOffset - refOrZero;
                if (!ctx.refs.has(targetOffset)) {
                    throw new Error(`Undefined backreference at offset ${startOffset}, target ${targetOffset}`);
                }
                return ctx.refs.get(targetOffset);
            }
            // Inline dict
            const result = new Map();
            ctx.refs.set(startOffset, result);
            const length = await reader.readVarint();
            for (let i = 0; i < length; i++) {
                const key = await keyDecoder(reader);
                const value = await valueDecoder(reader, ctx);
                result.set(key, value);
            }
            return result;
        };
        typeCtx.push(ret);
        valueDecoder = decodeBeast2ValueFromStreamFor(type.value.value, typeCtx);
        typeCtx.pop();
        return ret;
    }
    else if (type.type === "Struct") {
        // Struct is immutable - fields use typeCtx for recursive types
        const fieldDecoders = [];
        const ret = async (reader, _ctx) => {
            // Struct is immutable - no backreference checking needed
            // NOTE(review): field decoders run without ctx (fresh refs per
            // field) — mirrors the encoder's per-field ctx; confirm symmetry.
            const result = {};
            for (const [key, decoder] of fieldDecoders) {
                result[key] = await decoder(reader);
            }
            return result;
        };
        typeCtx.push(ret);
        for (const { name, type: fieldType } of type.value) {
            fieldDecoders.push([name, decodeBeast2ValueFromStreamFor(fieldType, typeCtx)]);
        }
        typeCtx.pop();
        return ret;
    }
    else if (type.type === "Variant") {
        // Variant is immutable - cases use typeCtx for recursive types
        const caseDecoders = [];
        const ret = async (reader, _ctx) => {
            // Variant is immutable - no backreference checking needed
            const tagIndex = await reader.readVarint();
            if (tagIndex >= caseDecoders.length) {
                throw new Error(`Invalid variant tag ${tagIndex}`);
            }
            const [caseName, caseDecoder] = caseDecoders[tagIndex];
            const value = await caseDecoder(reader);
            return variant(caseName, value);
        };
        typeCtx.push(ret);
        for (const { name, type: caseType } of type.value) {
            caseDecoders.push([name, decodeBeast2ValueFromStreamFor(caseType, typeCtx)]);
        }
        typeCtx.pop();
        return ret;
    }
    else if (type.type === "Recursive") {
        // Type stack lookup: index from the end
        const targetDecoder = typeCtx[typeCtx.length - Number(type.value)];
        if (!targetDecoder) {
            throw new Error(`Recursive type depth ${type.value} exceeds type context stack size ${typeCtx.length}`);
        }
        return targetDecoder;
    }
    else if (type.type === "Function") {
        return (_reader, _ctx) => { throw new Error(`Functions cannot be deserialized`); };
    }
    else {
        throw new Error(`Unhandled type ${type.type}`);
    }
}
593
/**
 * Internal helper to decode a single value of `type` from a stream reader.
 * Builds a streaming decoder for the type and invokes it with a fresh
 * backreference context.
 */
async function decodeBeast2ValueFromStream(type, reader) {
    const decode = decodeBeast2ValueFromStreamFor(type);
    return decode(reader, { refs: new Map() });
}
602
// Shared decoder for the type schema at the head of every Beast v2 stream.
const streamingTypeDecode = decodeBeast2ValueFromStreamFor(EastTypeValueType);
603
/**
 * Decode a Beast v2 format stream to an East value.
 * Reads the type schema from the stream and returns both type and value.
 *
 * The reader is always released, even when validation or decoding throws.
 *
 * @param stream The input stream to decode
 * @returns Promise resolving to { type, value }
 */
export async function decodeBeast2FromStream(stream) {
    const reader = new StreamBufferReader(stream);
    try {
        // Verify magic bytes one at a time, failing on the first mismatch.
        let i = 0;
        for (const expected of MAGIC_BYTES) {
            const byte = await reader.readByte();
            if (byte !== expected) {
                throw new Error(`Invalid Beast v2 magic bytes at offset ${i}: expected 0x${expected.toString(16)}, got 0x${byte.toString(16)}`);
            }
            i += 1;
        }
        // Decode type schema directly from stream, then the value it describes.
        const type = await streamingTypeDecode(reader, { refs: new Map() });
        const value = await decodeBeast2ValueFromStream(type, reader);
        return { type, value };
    }
    finally {
        await reader.release();
    }
}
630
/**
 * Create a streaming decoder for a specific Beast v2 type.
 * Factory pattern: the value decoder is built once; the returned async
 * function checks the magic bytes, verifies the embedded schema equals the
 * expected type, then decodes the value. The reader is always released.
 *
 * @param type The expected type to decode
 * @returns A function that takes a stream and returns a Promise of the decoded value
 */
export function decodeBeast2FromStreamFor(type) {
    const valueDecoder = decodeBeast2ValueFromStreamFor(type);
    return async (stream) => {
        const reader = new StreamBufferReader(stream);
        try {
            // Verify magic bytes
            for (let i = 0; i < MAGIC_BYTES.length; i += 1) {
                const byte = await reader.readByte();
                if (byte === MAGIC_BYTES[i]) {
                    continue;
                }
                throw new Error(`Invalid Beast v2 magic bytes at offset ${i}: expected 0x${MAGIC_BYTES[i].toString(16)}, got 0x${byte.toString(16)}`);
            }
            // Decode the embedded type schema and reject mismatched streams.
            const decodedType = await streamingTypeDecode(reader, { refs: new Map() });
            if (!isTypeValueEqual(decodedType, type)) {
                throw new Error(`Type mismatch: expected ${printTypeValue(type)}, got ${printTypeValue(decodedType)}`);
            }
            // Decode value with a fresh backreference context.
            return await valueDecoder(reader, { refs: new Map() });
        }
        finally {
            await reader.release();
        }
    };
}
665
+ //# sourceMappingURL=beast2-stream.js.map