@sachitv/avro-typescript 0.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/esm/_dnt.polyfills.d.ts +11 -0
- package/esm/_dnt.polyfills.js +15 -0
- package/esm/avro_reader.d.ts +156 -0
- package/esm/avro_reader.js +201 -0
- package/esm/avro_writer.d.ts +75 -0
- package/esm/avro_writer.js +105 -0
- package/esm/internal/collections/array_utils.d.ts +4 -0
- package/esm/internal/collections/array_utils.js +18 -0
- package/esm/internal/collections/circular_buffer.d.ts +48 -0
- package/esm/internal/collections/circular_buffer.js +129 -0
- package/esm/internal/crypto/md5.d.ts +12 -0
- package/esm/internal/crypto/md5.js +158 -0
- package/esm/internal/varint.d.ts +6 -0
- package/esm/internal/varint.js +30 -0
- package/esm/mod.d.ts +77 -0
- package/esm/mod.js +73 -0
- package/esm/package.json +3 -0
- package/esm/rpc/definitions/message_definition.d.ts +34 -0
- package/esm/rpc/definitions/message_definition.js +105 -0
- package/esm/rpc/definitions/protocol_definitions.d.ts +197 -0
- package/esm/rpc/definitions/protocol_definitions.js +1 -0
- package/esm/rpc/message_endpoint/base.d.ts +43 -0
- package/esm/rpc/message_endpoint/base.js +81 -0
- package/esm/rpc/message_endpoint/emitter.d.ts +96 -0
- package/esm/rpc/message_endpoint/emitter.js +245 -0
- package/esm/rpc/message_endpoint/helpers.d.ts +37 -0
- package/esm/rpc/message_endpoint/helpers.js +68 -0
- package/esm/rpc/message_endpoint/listener.d.ts +37 -0
- package/esm/rpc/message_endpoint/listener.js +212 -0
- package/esm/rpc/protocol/frame_assembler.d.ts +11 -0
- package/esm/rpc/protocol/frame_assembler.js +77 -0
- package/esm/rpc/protocol/protocol_helpers.d.ts +59 -0
- package/esm/rpc/protocol/protocol_helpers.js +123 -0
- package/esm/rpc/protocol/transports/fetch.d.ts +9 -0
- package/esm/rpc/protocol/transports/fetch.js +30 -0
- package/esm/rpc/protocol/transports/in_memory.d.ts +27 -0
- package/esm/rpc/protocol/transports/in_memory.js +125 -0
- package/esm/rpc/protocol/transports/transport_helpers.d.ts +118 -0
- package/esm/rpc/protocol/transports/transport_helpers.js +112 -0
- package/esm/rpc/protocol/transports/websocket.d.ts +9 -0
- package/esm/rpc/protocol/transports/websocket.js +102 -0
- package/esm/rpc/protocol/wire_format/framing.d.ts +44 -0
- package/esm/rpc/protocol/wire_format/framing.js +74 -0
- package/esm/rpc/protocol/wire_format/handshake.d.ts +110 -0
- package/esm/rpc/protocol/wire_format/handshake.js +239 -0
- package/esm/rpc/protocol/wire_format/messages.d.ts +192 -0
- package/esm/rpc/protocol/wire_format/messages.js +175 -0
- package/esm/rpc/protocol/wire_format/metadata.d.ts +44 -0
- package/esm/rpc/protocol/wire_format/metadata.js +74 -0
- package/esm/rpc/protocol_core.d.ts +121 -0
- package/esm/rpc/protocol_core.js +285 -0
- package/esm/schemas/base_type.d.ts +41 -0
- package/esm/schemas/base_type.js +49 -0
- package/esm/schemas/complex/array_type.d.ts +100 -0
- package/esm/schemas/complex/array_type.js +299 -0
- package/esm/schemas/complex/enum_type.d.ts +81 -0
- package/esm/schemas/complex/enum_type.js +217 -0
- package/esm/schemas/complex/fixed_type.d.ts +99 -0
- package/esm/schemas/complex/fixed_type.js +208 -0
- package/esm/schemas/complex/map_type.d.ts +97 -0
- package/esm/schemas/complex/map_type.js +290 -0
- package/esm/schemas/complex/named_type.d.ts +30 -0
- package/esm/schemas/complex/named_type.js +57 -0
- package/esm/schemas/complex/record_type.d.ts +165 -0
- package/esm/schemas/complex/record_type.js +547 -0
- package/esm/schemas/complex/resolve_names.d.ts +32 -0
- package/esm/schemas/complex/resolve_names.js +85 -0
- package/esm/schemas/complex/union_type.d.ts +116 -0
- package/esm/schemas/complex/union_type.js +392 -0
- package/esm/schemas/error.d.ts +31 -0
- package/esm/schemas/error.js +67 -0
- package/esm/schemas/json.d.ts +6 -0
- package/esm/schemas/json.js +35 -0
- package/esm/schemas/logical/decimal_logical_type.d.ts +49 -0
- package/esm/schemas/logical/decimal_logical_type.js +145 -0
- package/esm/schemas/logical/duration_logical_type.d.ts +66 -0
- package/esm/schemas/logical/duration_logical_type.js +112 -0
- package/esm/schemas/logical/logical_type.d.ts +138 -0
- package/esm/schemas/logical/logical_type.js +240 -0
- package/esm/schemas/logical/temporal_logical_types.d.ts +215 -0
- package/esm/schemas/logical/temporal_logical_types.js +364 -0
- package/esm/schemas/logical/uuid_logical_type.d.ts +63 -0
- package/esm/schemas/logical/uuid_logical_type.js +146 -0
- package/esm/schemas/primitive/boolean_type.d.ts +49 -0
- package/esm/schemas/primitive/boolean_type.js +75 -0
- package/esm/schemas/primitive/bytes_type.d.ts +49 -0
- package/esm/schemas/primitive/bytes_type.js +136 -0
- package/esm/schemas/primitive/double_type.d.ts +38 -0
- package/esm/schemas/primitive/double_type.js +98 -0
- package/esm/schemas/primitive/fixed_size_base_type.d.ts +28 -0
- package/esm/schemas/primitive/fixed_size_base_type.js +33 -0
- package/esm/schemas/primitive/float_type.d.ts +38 -0
- package/esm/schemas/primitive/float_type.js +88 -0
- package/esm/schemas/primitive/int_type.d.ts +31 -0
- package/esm/schemas/primitive/int_type.js +63 -0
- package/esm/schemas/primitive/long_type.d.ts +36 -0
- package/esm/schemas/primitive/long_type.js +95 -0
- package/esm/schemas/primitive/null_type.d.ts +45 -0
- package/esm/schemas/primitive/null_type.js +71 -0
- package/esm/schemas/primitive/primitive_type.d.ts +17 -0
- package/esm/schemas/primitive/primitive_type.js +27 -0
- package/esm/schemas/primitive/string_type.d.ts +34 -0
- package/esm/schemas/primitive/string_type.js +81 -0
- package/esm/schemas/resolver.d.ts +25 -0
- package/esm/schemas/resolver.js +25 -0
- package/esm/schemas/type.d.ts +101 -0
- package/esm/schemas/type.js +6 -0
- package/esm/serialization/avro_constants.d.ts +13 -0
- package/esm/serialization/avro_constants.js +29 -0
- package/esm/serialization/avro_file_parser.d.ts +68 -0
- package/esm/serialization/avro_file_parser.js +191 -0
- package/esm/serialization/avro_file_writer.d.ts +63 -0
- package/esm/serialization/avro_file_writer.js +235 -0
- package/esm/serialization/buffers/blob_readable_buffer.d.ts +53 -0
- package/esm/serialization/buffers/blob_readable_buffer.js +80 -0
- package/esm/serialization/buffers/buffer.d.ts +37 -0
- package/esm/serialization/buffers/buffer.js +1 -0
- package/esm/serialization/buffers/in_memory_buffer.d.ts +121 -0
- package/esm/serialization/buffers/in_memory_buffer.js +206 -0
- package/esm/serialization/clamp.d.ts +10 -0
- package/esm/serialization/clamp.js +19 -0
- package/esm/serialization/compare_bytes.d.ts +9 -0
- package/esm/serialization/compare_bytes.js +45 -0
- package/esm/serialization/conversion.d.ts +8 -0
- package/esm/serialization/conversion.js +15 -0
- package/esm/serialization/decoders/decoder.d.ts +16 -0
- package/esm/serialization/decoders/decoder.js +1 -0
- package/esm/serialization/decoders/deflate_decoder.d.ts +15 -0
- package/esm/serialization/decoders/deflate_decoder.js +26 -0
- package/esm/serialization/decoders/null_decoder.d.ts +12 -0
- package/esm/serialization/decoders/null_decoder.js +13 -0
- package/esm/serialization/encoders/deflate_encoder.d.ts +15 -0
- package/esm/serialization/encoders/deflate_encoder.js +26 -0
- package/esm/serialization/encoders/encoder.d.ts +16 -0
- package/esm/serialization/encoders/encoder.js +1 -0
- package/esm/serialization/encoders/null_encoder.d.ts +12 -0
- package/esm/serialization/encoders/null_encoder.js +13 -0
- package/esm/serialization/manipulate_bytes.d.ts +6 -0
- package/esm/serialization/manipulate_bytes.js +13 -0
- package/esm/serialization/read_uint_le.d.ts +4 -0
- package/esm/serialization/read_uint_le.js +14 -0
- package/esm/serialization/streams/fixed_size_stream_readable_buffer_adapter.d.ts +52 -0
- package/esm/serialization/streams/fixed_size_stream_readable_buffer_adapter.js +129 -0
- package/esm/serialization/streams/forward_only_stream_readable_buffer_adapter.d.ts +36 -0
- package/esm/serialization/streams/forward_only_stream_readable_buffer_adapter.js +114 -0
- package/esm/serialization/streams/stream_readable_buffer.d.ts +41 -0
- package/esm/serialization/streams/stream_readable_buffer.js +64 -0
- package/esm/serialization/streams/stream_readable_buffer_adapter.d.ts +38 -0
- package/esm/serialization/streams/stream_readable_buffer_adapter.js +105 -0
- package/esm/serialization/streams/stream_writable_buffer.d.ts +42 -0
- package/esm/serialization/streams/stream_writable_buffer.js +64 -0
- package/esm/serialization/streams/stream_writable_buffer_adapter.d.ts +36 -0
- package/esm/serialization/streams/stream_writable_buffer_adapter.js +65 -0
- package/esm/serialization/streams/streams.d.ts +29 -0
- package/esm/serialization/streams/streams.js +1 -0
- package/esm/serialization/tap.d.ts +427 -0
- package/esm/serialization/tap.js +605 -0
- package/esm/serialization/text_encoding.d.ts +12 -0
- package/esm/serialization/text_encoding.js +14 -0
- package/esm/type/create_type.d.ts +54 -0
- package/esm/type/create_type.js +401 -0
- package/package.json +33 -0
- package/sachitv-avro-typescript-0.4.0.tgz +0 -0
package/esm/serialization/avro_file_parser.js
@@ -0,0 +1,191 @@
+var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) {
+    if (kind === "m") throw new TypeError("Private method is not writable");
+    if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter");
+    if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it");
+    return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value;
+};
+var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) {
+    if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter");
+    if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it");
+    return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver);
+};
+var _AvroFileParser_instances, _AvroFileParser_buffer, _AvroFileParser_header, _AvroFileParser_headerTap, _AvroFileParser_readerSchema, _AvroFileParser_readerType, _AvroFileParser_resolver, _AvroFileParser_decoders, _AvroFileParser_builtInDecoders, _AvroFileParser_getDecoder, _AvroFileParser_parseHeader, _AvroFileParser_getResolver, _AvroFileParser_createReaderType;
+import { createType } from "../type/create_type.js";
+import { Type } from "../schemas/type.js";
+import { ReadableTap } from "./tap.js";
+import { DeflateDecoder } from "./decoders/deflate_decoder.js";
+import { NullDecoder } from "./decoders/null_decoder.js";
+import { BLOCK_TYPE, HEADER_TYPE, MAGIC_BYTES } from "./avro_constants.js";
+// Re-export constants for backward compatibility
+export { BLOCK_TYPE, HEADER_TYPE, MAGIC_BYTES };
+/**
+ * Parser for Avro object container files that reads headers, resolves schemas,
+ * and streams decoded records from readable buffers.
+ */
+export class AvroFileParser {
+    /**
+     * Creates a new AvroFileParser.
+     *
+     * @param buffer The readable buffer containing Avro data.
+     * @param options Configuration options for parsing.
+     */
+    constructor(buffer, options) {
+        _AvroFileParser_instances.add(this);
+        _AvroFileParser_buffer.set(this, void 0);
+        _AvroFileParser_header.set(this, void 0);
+        _AvroFileParser_headerTap.set(this, void 0);
+        _AvroFileParser_readerSchema.set(this, void 0);
+        _AvroFileParser_readerType.set(this, void 0);
+        _AvroFileParser_resolver.set(this, void 0);
+        _AvroFileParser_decoders.set(this, void 0);
+        _AvroFileParser_builtInDecoders.set(this, void 0);
+        __classPrivateFieldSet(this, _AvroFileParser_buffer, buffer, "f");
+        __classPrivateFieldSet(this, _AvroFileParser_readerSchema, options?.readerSchema, "f");
+        // Initialize built-in decoders
+        __classPrivateFieldSet(this, _AvroFileParser_builtInDecoders, {
+            "null": new NullDecoder(),
+            "deflate": new DeflateDecoder(),
+        }, "f");
+        // Validate custom decoders (cannot override built-ins)
+        const customDecoders = options?.decoders || {};
+        for (const codec of Object.keys(customDecoders)) {
+            if (codec in __classPrivateFieldGet(this, _AvroFileParser_builtInDecoders, "f")) {
+                throw new Error(`Cannot override built-in decoder for codec: ${codec}`);
+            }
+        }
+        __classPrivateFieldSet(this, _AvroFileParser_decoders, { ...customDecoders }, "f");
+    }
+    /**
+     * Gets the parsed Avro file header with proper typing.
+     *
+     * @returns Promise that resolves to the parsed header information.
+     * @throws Error if the file is not a valid Avro file.
+     */
+    async getHeader() {
+        const header = await __classPrivateFieldGet(this, _AvroFileParser_instances, "m", _AvroFileParser_parseHeader).call(this);
+        return {
+            magic: header.magic,
+            meta: header.meta,
+            sync: header.sync,
+        };
+    }
+    /**
+     * Asynchronously iterates over all records in the Avro file.
+     *
+     * @returns AsyncIterableIterator that yields each record.
+     * @throws Error if the file contains invalid data or is corrupted.
+     */
+    async *iterRecords() {
+        const header = await __classPrivateFieldGet(this, _AvroFileParser_instances, "m", _AvroFileParser_parseHeader).call(this);
+        const { schemaType, meta } = header;
+        const resolver = __classPrivateFieldGet(this, _AvroFileParser_instances, "m", _AvroFileParser_getResolver).call(this, schemaType);
+        // Get the codec from metadata
+        const codecBytes = meta.get("avro.codec");
+        const codecStr = (() => {
+            if (codecBytes === undefined) {
+                return "null";
+            }
+            const decoded = new TextDecoder().decode(codecBytes);
+            return decoded.length === 0 ? "null" : decoded;
+        })();
+        const decoder = __classPrivateFieldGet(this, _AvroFileParser_instances, "m", _AvroFileParser_getDecoder).call(this, codecStr);
+        // Use the tap that's positioned after the header
+        const tap = __classPrivateFieldGet(this, _AvroFileParser_headerTap, "f");
+        while (await tap.canReadMore()) {
+            const block = await BLOCK_TYPE.read(tap);
+            // Decompress block data if needed
+            const decompressedData = await decoder.decode(block.data);
+            const arrayBuffer = new ArrayBuffer(decompressedData.length);
+            new Uint8Array(arrayBuffer).set(decompressedData);
+            const recordTap = new ReadableTap(arrayBuffer);
+            // Yield each record in the block
+            for (let i = 0n; i < block.count; i += 1n) {
+                const record = resolver
+                    ? await resolver.read(recordTap)
+                    : await schemaType.read(recordTap);
+                yield record;
+            }
+        }
+    }
+}
+_AvroFileParser_buffer = new WeakMap(), _AvroFileParser_header = new WeakMap(), _AvroFileParser_headerTap = new WeakMap(), _AvroFileParser_readerSchema = new WeakMap(), _AvroFileParser_readerType = new WeakMap(), _AvroFileParser_resolver = new WeakMap(), _AvroFileParser_decoders = new WeakMap(), _AvroFileParser_builtInDecoders = new WeakMap(), _AvroFileParser_instances = new WeakSet(), _AvroFileParser_getDecoder = function _AvroFileParser_getDecoder(codec) {
+    // Check built-in decoders first
+    if (codec in __classPrivateFieldGet(this, _AvroFileParser_builtInDecoders, "f")) {
+        return __classPrivateFieldGet(this, _AvroFileParser_builtInDecoders, "f")[codec];
+    }
+    // Check custom decoders
+    if (codec in __classPrivateFieldGet(this, _AvroFileParser_decoders, "f")) {
+        return __classPrivateFieldGet(this, _AvroFileParser_decoders, "f")[codec];
+    }
+    throw new Error(`Unsupported codec: ${codec}. Provide a custom decoder.`);
+}, _AvroFileParser_parseHeader =
+/**
+ * Private method to parse the Avro file header and cache it.
+ *
+ * @returns Promise that resolves to the parsed header information.
+ * @throws Error if the file is not a valid Avro file.
+ */
+async function _AvroFileParser_parseHeader() {
+    if (__classPrivateFieldGet(this, _AvroFileParser_header, "f")) {
+        return __classPrivateFieldGet(this, _AvroFileParser_header, "f");
+    }
+    const tap = new ReadableTap(__classPrivateFieldGet(this, _AvroFileParser_buffer, "f"));
+    const header = await HEADER_TYPE.read(tap);
+    // Validate magic bytes
+    const magic = header.magic;
+    for (let i = 0; i < MAGIC_BYTES.length; i++) {
+        if (magic[i] !== MAGIC_BYTES[i]) {
+            throw new Error("Invalid AVRO file: incorrect magic bytes");
+        }
+    }
+    // Extract metadata
+    const meta = header.meta;
+    // Read and parse the schema
+    const schemaJson = meta.get("avro.schema");
+    if (!schemaJson) {
+        throw new Error("AVRO schema not found in metadata");
+    }
+    const schemaStr = new TextDecoder().decode(schemaJson);
+    const schemaType = createType(JSON.parse(schemaStr));
+    // Validate that we have a decoder for the codec
+    const codec = meta.get("avro.codec");
+    if (codec && codec.length > 0) {
+        const codecStr = new TextDecoder().decode(codec);
+        // This will throw if codec is not supported
+        __classPrivateFieldGet(this, _AvroFileParser_instances, "m", _AvroFileParser_getDecoder).call(this, codecStr);
+    }
+    const sync = header.sync;
+    __classPrivateFieldSet(this, _AvroFileParser_header, {
+        magic,
+        meta,
+        sync,
+        schemaType,
+    }, "f");
+    // Store the tap at its current position for reading blocks
+    __classPrivateFieldSet(this, _AvroFileParser_headerTap, tap, "f");
+    return __classPrivateFieldGet(this, _AvroFileParser_header, "f");
+}, _AvroFileParser_getResolver = function _AvroFileParser_getResolver(writerType) {
+    if (__classPrivateFieldGet(this, _AvroFileParser_readerSchema, "f") === undefined || __classPrivateFieldGet(this, _AvroFileParser_readerSchema, "f") === null) {
+        return undefined;
+    }
+    if (!__classPrivateFieldGet(this, _AvroFileParser_readerType, "f")) {
+        __classPrivateFieldSet(this, _AvroFileParser_readerType, __classPrivateFieldGet(this, _AvroFileParser_instances, "m", _AvroFileParser_createReaderType).call(this, __classPrivateFieldGet(this, _AvroFileParser_readerSchema, "f")), "f");
+    }
+    if (!__classPrivateFieldGet(this, _AvroFileParser_resolver, "f")) {
+        __classPrivateFieldSet(this, _AvroFileParser_resolver, __classPrivateFieldGet(this, _AvroFileParser_readerType, "f").createResolver(writerType), "f");
+    }
+    return __classPrivateFieldGet(this, _AvroFileParser_resolver, "f");
+}, _AvroFileParser_createReaderType = function _AvroFileParser_createReaderType(schema) {
+    if (schema instanceof Type) {
+        return schema;
+    }
+    if (typeof schema === "string") {
+        const trimmed = schema.trim();
+        if (trimmed.startsWith("{") || trimmed.startsWith("[")) {
+            const parsed = JSON.parse(trimmed);
+            return createType(parsed);
+        }
+        return createType(schema);
+    }
+    return createType(schema);
+};

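The parser above has a small surface: construct it over a readable buffer, call getHeader() for the container metadata, and iterate iterRecords() for the decoded records. A minimal usage sketch follows; it assumes AvroFileParser and BlobReadableBuffer (also added in this release) are re-exported from the package entry point, so the import path may need adjusting.

```typescript
// Sketch: stream every record out of an Avro object container file held in a Blob.
// Assumes the package root re-exports AvroFileParser and BlobReadableBuffer;
// adjust the import to the actual entry point of @sachitv/avro-typescript.
import { AvroFileParser, BlobReadableBuffer } from "@sachitv/avro-typescript";

async function dumpRecords(file: Blob): Promise<void> {
  const buffer = new BlobReadableBuffer(file);
  // The optional second argument takes { readerSchema, decoders } for schema
  // resolution and custom codecs, mirroring the constructor above.
  const parser = new AvroFileParser(buffer);

  const header = await parser.getHeader();
  console.log("metadata keys:", [...header.meta.keys()]);

  for await (const record of parser.iterRecords()) {
    console.log(record);
  }
}
```
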
package/esm/serialization/avro_file_writer.d.ts
@@ -0,0 +1,63 @@
+import { type SchemaLike } from "../type/create_type.js";
+import type { IWritableBuffer } from "./buffers/buffer.js";
+import type { EncoderRegistry } from "./encoders/encoder.js";
+/**
+ * Type for initializing metadata in an Avro file header.
+ * Can be a Map of string keys to Uint8Array values, or a plain object with string or Uint8Array values.
+ */
+export type MetadataInit = Map<string, Uint8Array> | Record<string, string | Uint8Array>;
+/**
+ * Options for configuring an AvroFileWriter.
+ */
+export interface AvroWriterOptions {
+    /**
+     * The Avro schema defining the structure of records to be written.
+     */
+    schema: SchemaLike;
+    /**
+     * The compression codec to use for data blocks. Defaults to "null" (no compression).
+     */
+    codec?: string;
+    /**
+     * The maximum size in bytes for data blocks. Defaults to 64000 bytes.
+     */
+    blockSize?: number;
+    /**
+     * A 16-byte sync marker used to delineate blocks. If not provided, a random one is generated.
+     */
+    syncMarker?: Uint8Array;
+    /**
+     * Additional metadata to include in the Avro file header.
+     */
+    metadata?: MetadataInit;
+    /**
+     * Custom encoders for compression codecs not built into the library.
+     */
+    encoders?: EncoderRegistry;
+}
+/**
+ * Low-level writer for Avro object container files that manages schema
+ * serialization, block encoding, metadata, and sync markers.
+ */
+export declare class AvroFileWriter {
+    #private;
+    /**
+     * Creates a new AvroFileWriter.
+     * @param buffer The writable buffer to write to.
+     * @param options Configuration options including schema and codec.
+     */
+    constructor(buffer: IWritableBuffer, options: AvroWriterOptions);
+    /**
+     * Append a single record to the file.
+     */
+    append(record: unknown): Promise<void>;
+    /**
+     * Flush pending data and prevent further writes.
+     */
+    close(): Promise<void>;
+    /**
+     * Manually flush pending records to a block.
+     * This can be called to force writing of accumulated records before the block size threshold is reached.
+     */
+    flushBlock(): Promise<void>;
+}

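AvroWriterOptions above is the writer's entire configuration surface. The sketch below shows one plausible options value; the "User" record schema is illustrative, and it assumes SchemaLike accepts a parsed Avro schema object (as the parser's use of createType on parsed JSON suggests) and that the type is re-exported from the package root.

```typescript
// Sketch: an AvroWriterOptions value exercising the fields declared above.
// The "User" schema is illustrative; built-in codecs are "null" and "deflate".
import type { AvroWriterOptions } from "@sachitv/avro-typescript"; // assumed re-export

const options: AvroWriterOptions = {
  schema: {
    type: "record",
    name: "User",
    fields: [
      { name: "id", type: "int" },
      { name: "name", type: "string" },
    ],
  },
  codec: "deflate",                     // default is "null" (no compression)
  blockSize: 64000,                     // flush a block once pending bytes reach this
  metadata: { "app.version": "1.2.3" }, // avro.schema / avro.codec keys are reserved
};
```
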
package/esm/serialization/avro_file_writer.js
@@ -0,0 +1,235 @@
+var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) {
+    if (kind === "m") throw new TypeError("Private method is not writable");
+    if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter");
+    if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it");
+    return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value;
+};
+var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) {
+    if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter");
+    if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it");
+    return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver);
+};
+var _AvroFileWriter_instances, _AvroFileWriter_tap, _AvroFileWriter_schemaType, _AvroFileWriter_codec, _AvroFileWriter_encoder, _AvroFileWriter_blockSize, _AvroFileWriter_syncMarker, _AvroFileWriter_metadata, _AvroFileWriter_pendingRecords, _AvroFileWriter_pendingBytes, _AvroFileWriter_pendingCount, _AvroFileWriter_headerPromise, _AvroFileWriter_headerWritten, _AvroFileWriter_closed, _AvroFileWriter_builtInEncoders, _AvroFileWriter_customEncoders, _AvroFileWriter_ensureOpen, _AvroFileWriter_validateBlockSize, _AvroFileWriter_initializeSyncMarker, _AvroFileWriter_validateCustomEncoders, _AvroFileWriter_resolveEncoder, _AvroFileWriter_buildMetadata, _AvroFileWriter_assertMetadataKey, _AvroFileWriter_ensureHeaderWritten, _AvroFileWriter_writeHeader, _AvroFileWriter_encodeRecord, _AvroFileWriter_combinePending;
+import { createType } from "../type/create_type.js";
+import { WritableTap } from "./tap.js";
+import { BLOCK_TYPE, HEADER_TYPE, MAGIC_BYTES } from "./avro_constants.js";
+import { DeflateEncoder } from "./encoders/deflate_encoder.js";
+import { NullEncoder } from "./encoders/null_encoder.js";
+import { encode as encodeString } from "./text_encoding.js";
+/**
+ * Default block size in bytes for Avro data blocks.
+ * Set to 64000 to match the default used by the Java Avro libraries.
+ */
+const DEFAULT_BLOCK_SIZE_BYTES = 64000;
+const SYNC_MARKER_SIZE = 16;
+const RESERVED_METADATA_KEYS = new Set(["avro.schema", "avro.codec"]);
+/**
+ * Low-level writer for Avro object container files that manages schema
+ * serialization, block encoding, metadata, and sync markers.
+ */
+export class AvroFileWriter {
+    /**
+     * Creates a new AvroFileWriter.
+     * @param buffer The writable buffer to write to.
+     * @param options Configuration options including schema and codec.
+     */
+    constructor(buffer, options) {
+        _AvroFileWriter_instances.add(this);
+        _AvroFileWriter_tap.set(this, void 0);
+        _AvroFileWriter_schemaType.set(this, void 0);
+        _AvroFileWriter_codec.set(this, void 0);
+        _AvroFileWriter_encoder.set(this, void 0);
+        _AvroFileWriter_blockSize.set(this, void 0);
+        _AvroFileWriter_syncMarker.set(this, void 0);
+        _AvroFileWriter_metadata.set(this, void 0);
+        _AvroFileWriter_pendingRecords.set(this, []);
+        _AvroFileWriter_pendingBytes.set(this, 0);
+        _AvroFileWriter_pendingCount.set(this, 0);
+        _AvroFileWriter_headerPromise.set(this, void 0);
+        _AvroFileWriter_headerWritten.set(this, false);
+        _AvroFileWriter_closed.set(this, false);
+        _AvroFileWriter_builtInEncoders.set(this, void 0);
+        _AvroFileWriter_customEncoders.set(this, void 0);
+        if (!options || !options.schema) {
+            throw new Error("Avro writer requires a schema.");
+        }
+        __classPrivateFieldSet(this, _AvroFileWriter_tap, new WritableTap(buffer), "f");
+        __classPrivateFieldSet(this, _AvroFileWriter_schemaType, createType(options.schema), "f");
+        __classPrivateFieldSet(this, _AvroFileWriter_codec, options.codec ?? "null", "f");
+        __classPrivateFieldSet(this, _AvroFileWriter_blockSize, __classPrivateFieldGet(this, _AvroFileWriter_instances, "m", _AvroFileWriter_validateBlockSize).call(this, options.blockSize), "f");
+        __classPrivateFieldSet(this, _AvroFileWriter_syncMarker, __classPrivateFieldGet(this, _AvroFileWriter_instances, "m", _AvroFileWriter_initializeSyncMarker).call(this, options.syncMarker), "f");
+        __classPrivateFieldSet(this, _AvroFileWriter_builtInEncoders, {
+            "null": new NullEncoder(),
+            "deflate": new DeflateEncoder(),
+        }, "f");
+        __classPrivateFieldSet(this, _AvroFileWriter_customEncoders, __classPrivateFieldGet(this, _AvroFileWriter_instances, "m", _AvroFileWriter_validateCustomEncoders).call(this, options.encoders), "f");
+        __classPrivateFieldSet(this, _AvroFileWriter_encoder, __classPrivateFieldGet(this, _AvroFileWriter_instances, "m", _AvroFileWriter_resolveEncoder).call(this, __classPrivateFieldGet(this, _AvroFileWriter_codec, "f")), "f");
+        __classPrivateFieldSet(this, _AvroFileWriter_metadata, __classPrivateFieldGet(this, _AvroFileWriter_instances, "m", _AvroFileWriter_buildMetadata).call(this, options.metadata), "f");
+        __classPrivateFieldGet(this, _AvroFileWriter_metadata, "f").set("avro.schema", encodeString(JSON.stringify(__classPrivateFieldGet(this, _AvroFileWriter_schemaType, "f").toJSON())));
+        if (__classPrivateFieldGet(this, _AvroFileWriter_codec, "f") !== "null") {
+            __classPrivateFieldGet(this, _AvroFileWriter_metadata, "f").set("avro.codec", encodeString(__classPrivateFieldGet(this, _AvroFileWriter_codec, "f")));
+        }
+    }
+    /**
+     * Append a single record to the file.
+     */
+    async append(record) {
+        __classPrivateFieldGet(this, _AvroFileWriter_instances, "m", _AvroFileWriter_ensureOpen).call(this);
+        await __classPrivateFieldGet(this, _AvroFileWriter_instances, "m", _AvroFileWriter_ensureHeaderWritten).call(this);
+        if (!__classPrivateFieldGet(this, _AvroFileWriter_schemaType, "f").isValid(record)) {
+            throw new Error("Record does not conform to the schema.");
+        }
+        const recordBytes = await __classPrivateFieldGet(this, _AvroFileWriter_instances, "m", _AvroFileWriter_encodeRecord).call(this, record);
+        __classPrivateFieldGet(this, _AvroFileWriter_pendingRecords, "f").push(recordBytes);
+        __classPrivateFieldSet(this, _AvroFileWriter_pendingBytes, __classPrivateFieldGet(this, _AvroFileWriter_pendingBytes, "f") + recordBytes.length, "f");
+        __classPrivateFieldSet(this, _AvroFileWriter_pendingCount, __classPrivateFieldGet(this, _AvroFileWriter_pendingCount, "f") + 1, "f");
+        if (__classPrivateFieldGet(this, _AvroFileWriter_pendingBytes, "f") >= __classPrivateFieldGet(this, _AvroFileWriter_blockSize, "f")) {
+            await this.flushBlock();
+        }
+    }
+    /**
+     * Flush pending data and prevent further writes.
+     */
+    async close() {
+        if (__classPrivateFieldGet(this, _AvroFileWriter_closed, "f")) {
+            return;
+        }
+        await __classPrivateFieldGet(this, _AvroFileWriter_instances, "m", _AvroFileWriter_ensureHeaderWritten).call(this);
+        await this.flushBlock();
+        __classPrivateFieldSet(this, _AvroFileWriter_closed, true, "f");
+    }
+    /**
+     * Manually flush pending records to a block.
+     * This can be called to force writing of accumulated records before the block size threshold is reached.
+     */
+    async flushBlock() {
+        __classPrivateFieldGet(this, _AvroFileWriter_instances, "m", _AvroFileWriter_ensureOpen).call(this);
+        await __classPrivateFieldGet(this, _AvroFileWriter_instances, "m", _AvroFileWriter_ensureHeaderWritten).call(this);
+        if (__classPrivateFieldGet(this, _AvroFileWriter_pendingCount, "f") === 0) {
+            return;
+        }
+        const combined = __classPrivateFieldGet(this, _AvroFileWriter_instances, "m", _AvroFileWriter_combinePending).call(this);
+        const encoded = await __classPrivateFieldGet(this, _AvroFileWriter_encoder, "f").encode(combined);
+        const block = {
+            count: BigInt(__classPrivateFieldGet(this, _AvroFileWriter_pendingCount, "f")),
+            data: encoded,
+            sync: __classPrivateFieldGet(this, _AvroFileWriter_syncMarker, "f"),
+        };
+        await BLOCK_TYPE.write(__classPrivateFieldGet(this, _AvroFileWriter_tap, "f"), block);
+        __classPrivateFieldSet(this, _AvroFileWriter_pendingRecords, [], "f");
+        __classPrivateFieldSet(this, _AvroFileWriter_pendingBytes, 0, "f");
+        __classPrivateFieldSet(this, _AvroFileWriter_pendingCount, 0, "f");
+    }
+}
+_AvroFileWriter_tap = new WeakMap(), _AvroFileWriter_schemaType = new WeakMap(), _AvroFileWriter_codec = new WeakMap(), _AvroFileWriter_encoder = new WeakMap(), _AvroFileWriter_blockSize = new WeakMap(), _AvroFileWriter_syncMarker = new WeakMap(), _AvroFileWriter_metadata = new WeakMap(), _AvroFileWriter_pendingRecords = new WeakMap(), _AvroFileWriter_pendingBytes = new WeakMap(), _AvroFileWriter_pendingCount = new WeakMap(), _AvroFileWriter_headerPromise = new WeakMap(), _AvroFileWriter_headerWritten = new WeakMap(), _AvroFileWriter_closed = new WeakMap(), _AvroFileWriter_builtInEncoders = new WeakMap(), _AvroFileWriter_customEncoders = new WeakMap(), _AvroFileWriter_instances = new WeakSet(), _AvroFileWriter_ensureOpen = function _AvroFileWriter_ensureOpen() {
+    if (__classPrivateFieldGet(this, _AvroFileWriter_closed, "f")) {
+        throw new Error("Avro writer is already closed.");
+    }
+}, _AvroFileWriter_validateBlockSize = function _AvroFileWriter_validateBlockSize(blockSize) {
+    const size = blockSize ?? DEFAULT_BLOCK_SIZE_BYTES;
+    if (!Number.isFinite(size) || !Number.isInteger(size) || size <= 0) {
+        throw new RangeError("blockSize must be a positive integer byte count.");
+    }
+    return size;
+}, _AvroFileWriter_initializeSyncMarker = function _AvroFileWriter_initializeSyncMarker(marker) {
+    if (!marker) {
+        const generated = new Uint8Array(SYNC_MARKER_SIZE);
+        crypto.getRandomValues(generated);
+        return generated;
+    }
+    if (marker.length !== SYNC_MARKER_SIZE) {
+        throw new Error(`Sync marker must be ${SYNC_MARKER_SIZE} bytes long.`);
+    }
+    return marker.slice();
+}, _AvroFileWriter_validateCustomEncoders = function _AvroFileWriter_validateCustomEncoders(encoders) {
+    if (!encoders) {
+        return {};
+    }
+    for (const codec of Object.keys(encoders)) {
+        if (codec in __classPrivateFieldGet(this, _AvroFileWriter_builtInEncoders, "f")) {
+            throw new Error(`Cannot override built-in encoder for codec: ${codec}`);
+        }
+    }
+    return { ...encoders };
+}, _AvroFileWriter_resolveEncoder = function _AvroFileWriter_resolveEncoder(codec) {
+    if (codec in __classPrivateFieldGet(this, _AvroFileWriter_builtInEncoders, "f")) {
+        return __classPrivateFieldGet(this, _AvroFileWriter_builtInEncoders, "f")[codec];
+    }
+    if (codec in __classPrivateFieldGet(this, _AvroFileWriter_customEncoders, "f")) {
+        return __classPrivateFieldGet(this, _AvroFileWriter_customEncoders, "f")[codec];
+    }
+    throw new Error(`Unsupported codec: ${codec}. Provide a custom encoder.`);
+}, _AvroFileWriter_buildMetadata = function _AvroFileWriter_buildMetadata(input) {
+    const metadata = new Map();
+    if (!input) {
+        return metadata;
+    }
+    const setEntry = (key, value) => {
+        __classPrivateFieldGet(this, _AvroFileWriter_instances, "m", _AvroFileWriter_assertMetadataKey).call(this, key);
+        if (value instanceof Uint8Array) {
+            metadata.set(key, value.slice());
+        }
+        else {
+            metadata.set(key, encodeString(value));
+        }
+    };
+    if (input instanceof Map) {
+        for (const [key, value] of input.entries()) {
+            setEntry(key, value);
+        }
+    }
+    else {
+        for (const [key, value] of Object.entries(input)) {
+            setEntry(key, value);
+        }
+    }
+    return metadata;
+}, _AvroFileWriter_assertMetadataKey = function _AvroFileWriter_assertMetadataKey(key) {
+    if (typeof key !== "string" || key.length === 0) {
+        throw new Error("Metadata keys must be non-empty strings.");
+    }
+    if (RESERVED_METADATA_KEYS.has(key)) {
+        throw new Error(`Metadata key "${key}" is reserved and managed by the Avro writer.`);
+    }
+}, _AvroFileWriter_ensureHeaderWritten =
+/**
+ * Ensures the header has been written to the output.
+ */
+async function _AvroFileWriter_ensureHeaderWritten() {
+    if (__classPrivateFieldGet(this, _AvroFileWriter_headerWritten, "f")) {
+        return;
+    }
+    if (!__classPrivateFieldGet(this, _AvroFileWriter_headerPromise, "f")) {
+        __classPrivateFieldSet(this, _AvroFileWriter_headerPromise, __classPrivateFieldGet(this, _AvroFileWriter_instances, "m", _AvroFileWriter_writeHeader).call(this), "f");
+    }
+    await __classPrivateFieldGet(this, _AvroFileWriter_headerPromise, "f");
+}, _AvroFileWriter_writeHeader =
+/**
+ * Writes the Avro file header to the output.
+ */
+async function _AvroFileWriter_writeHeader() {
+    const header = {
+        magic: MAGIC_BYTES,
+        meta: __classPrivateFieldGet(this, _AvroFileWriter_metadata, "f"),
+        sync: __classPrivateFieldGet(this, _AvroFileWriter_syncMarker, "f"),
+    };
+    await HEADER_TYPE.write(__classPrivateFieldGet(this, _AvroFileWriter_tap, "f"), header);
+    __classPrivateFieldSet(this, _AvroFileWriter_headerWritten, true, "f");
+}, _AvroFileWriter_encodeRecord =
+/**
+ * Encodes a record to bytes using the schema.
+ * @param record The record to encode.
+ * @returns The encoded record bytes.
+ */
+async function _AvroFileWriter_encodeRecord(record) {
+    const buffer = await __classPrivateFieldGet(this, _AvroFileWriter_schemaType, "f").toBuffer(record);
+    return new Uint8Array(buffer);
+}, _AvroFileWriter_combinePending = function _AvroFileWriter_combinePending() {
+    const combined = new Uint8Array(__classPrivateFieldGet(this, _AvroFileWriter_pendingBytes, "f"));
+    let offset = 0;
+    for (const record of __classPrivateFieldGet(this, _AvroFileWriter_pendingRecords, "f")) {
+        combined.set(record, offset);
+        offset += record.length;
+    }
+    return combined;
+}

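The implementation above writes the header lazily before the first record, validates each record against the schema, buffers encoded records until blockSize bytes accumulate, and lets flushBlock() force a block out early; close() flushes once more and rejects further writes. A hedged end-to-end sketch, again assuming the classes are re-exported from the package root and that any IWritableBuffer implementation (for example the in-memory buffer shipped in this release) serves as the sink:

```typescript
// Sketch: writing records through AvroFileWriter into any IWritableBuffer sink.
// Import path and the object-literal schema form are assumptions, as above.
import { AvroFileWriter } from "@sachitv/avro-typescript";
import type { IWritableBuffer } from "@sachitv/avro-typescript";

async function writeUsers(sink: IWritableBuffer): Promise<void> {
  const writer = new AvroFileWriter(sink, {
    schema: {
      type: "record",
      name: "User",
      fields: [
        { name: "id", type: "int" },
        { name: "name", type: "string" },
      ],
    },
    codec: "deflate",
  });

  // The container header is written lazily before the first record lands.
  await writer.append({ id: 1, name: "Ada" });
  await writer.append({ id: 2, name: "Grace" });

  // Records accumulate until blockSize is reached; force a block out early:
  await writer.flushBlock();

  // close() flushes anything still pending and rejects later append() calls.
  await writer.close();
}
```
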
package/esm/serialization/buffers/blob_readable_buffer.d.ts
@@ -0,0 +1,53 @@
+import type { IReadableBuffer } from "./buffer.js";
+/**
+ * A read-only buffer implementation that provides random access read operations
+ * on data from a Blob. This class is useful for working with binary data
+ * from files, network responses, or other Blob sources.
+ *
+ * Reads are performed asynchronously directly from the Blob.
+ *
+ * Key features:
+ * - Blob-backed: Reads data directly from the Blob asynchronously.
+ * - Random access: Supports reading at arbitrary byte offsets.
+ * - Bounds checking: Operations that would exceed buffer bounds are safely ignored or return undefined.
+ * - Memory efficient: Doesn't load data into memory.
+ *
+ * @example
+ * ```typescript
+ * const blob = new Blob([new Uint8Array([1, 2, 3, 4])]);
+ * const buffer = new BlobReadableBuffer(blob);
+ *
+ * // Read some data asynchronously
+ * const data = await buffer.read(0, 4); // Returns Uint8Array([1, 2, 3, 4])
+ * ```
+ */
+export declare class BlobReadableBuffer implements IReadableBuffer {
+    #private;
+    /**
+     * Creates a new BlobReadableBuffer from the provided Blob.
+     *
+     * @param blob The Blob to read data from.
+     */
+    constructor(blob: Blob);
+    /**
+     * Gets the total length of the buffer in bytes.
+     *
+     * @returns The buffer length in bytes.
+     */
+    length(): Promise<number>;
+    /**
+     * Reads a sequence of bytes from the buffer starting at the specified offset.
+     * Reads asynchronously from the Blob.
+     *
+     * @param offset The byte offset to start reading from (0-based).
+     * @param size The number of bytes to read.
+     * @returns A Promise that resolves to a new Uint8Array containing the read bytes, or undefined if the read would exceed buffer bounds.
+     */
+    read(offset: number, size: number): Promise<Uint8Array | undefined>;
+    /**
+     * Checks if more data can be read starting at the given offset.
+     * @param offset The byte offset to check.
+     * @returns True if at least one byte can be read from the offset.
+     */
+    canReadMore(offset: number): Promise<boolean>;
+}

package/esm/serialization/buffers/blob_readable_buffer.js
@@ -0,0 +1,80 @@
+var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) {
+    if (kind === "m") throw new TypeError("Private method is not writable");
+    if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter");
+    if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it");
+    return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value;
+};
+var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) {
+    if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter");
+    if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it");
+    return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver);
+};
+var _BlobReadableBuffer_blob;
+/**
+ * A read-only buffer implementation that provides random access read operations
+ * on data from a Blob. This class is useful for working with binary data
+ * from files, network responses, or other Blob sources.
+ *
+ * Reads are performed asynchronously directly from the Blob.
+ *
+ * Key features:
+ * - Blob-backed: Reads data directly from the Blob asynchronously.
+ * - Random access: Supports reading at arbitrary byte offsets.
+ * - Bounds checking: Operations that would exceed buffer bounds are safely ignored or return undefined.
+ * - Memory efficient: Doesn't load data into memory.
+ *
+ * @example
+ * ```typescript
+ * const blob = new Blob([new Uint8Array([1, 2, 3, 4])]);
+ * const buffer = new BlobReadableBuffer(blob);
+ *
+ * // Read some data asynchronously
+ * const data = await buffer.read(0, 4); // Returns Uint8Array([1, 2, 3, 4])
+ * ```
+ */
+export class BlobReadableBuffer {
+    /**
+     * Creates a new BlobReadableBuffer from the provided Blob.
+     *
+     * @param blob The Blob to read data from.
+     */
+    constructor(blob) {
+        _BlobReadableBuffer_blob.set(this, void 0);
+        __classPrivateFieldSet(this, _BlobReadableBuffer_blob, blob, "f");
+    }
+    /**
+     * Gets the total length of the buffer in bytes.
+     *
+     * @returns The buffer length in bytes.
+     */
+    // deno-lint-ignore require-await
+    async length() {
+        return __classPrivateFieldGet(this, _BlobReadableBuffer_blob, "f").size;
+    }
+    /**
+     * Reads a sequence of bytes from the buffer starting at the specified offset.
+     * Reads asynchronously from the Blob.
+     *
+     * @param offset The byte offset to start reading from (0-based).
+     * @param size The number of bytes to read.
+     * @returns A Promise that resolves to a new Uint8Array containing the read bytes, or undefined if the read would exceed buffer bounds.
+     */
+    async read(offset, size) {
+        if (offset + size > __classPrivateFieldGet(this, _BlobReadableBuffer_blob, "f").size)
+            return undefined;
+        // Read directly from the Blob
+        const sliced = __classPrivateFieldGet(this, _BlobReadableBuffer_blob, "f").slice(offset, offset + size);
+        const arrayBuffer = await sliced.arrayBuffer();
+        return new Uint8Array(arrayBuffer);
+    }
+    /**
+     * Checks if more data can be read starting at the given offset.
+     * @param offset The byte offset to check.
+     * @returns True if at least one byte can be read from the offset.
+     */
+    async canReadMore(offset) {
+        const result = await this.read(offset, 1);
+        return result !== undefined;
+    }
+}
+_BlobReadableBuffer_blob = new WeakMap();

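Beyond the @example embedded in the doc comment, the bounds behaviour is worth spelling out: a read that would run past the end of the Blob resolves to undefined rather than throwing, and canReadMore() is implemented as a one-byte probe read. A small sketch (same re-export assumption as earlier):

```typescript
// Sketch: bounds behaviour of BlobReadableBuffer (top-level await in an ES module).
import { BlobReadableBuffer } from "@sachitv/avro-typescript"; // assumed re-export

const blob = new Blob([new Uint8Array([10, 20, 30])]);
const buffer = new BlobReadableBuffer(blob);

console.log(await buffer.length());       // 3
console.log(await buffer.read(1, 2));     // Uint8Array [20, 30]
console.log(await buffer.read(2, 5));     // undefined: would run past the Blob
console.log(await buffer.canReadMore(2)); // true  (byte 30 is still readable)
console.log(await buffer.canReadMore(3)); // false (nothing left past the end)
```
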
package/esm/serialization/buffers/buffer.d.ts
@@ -0,0 +1,37 @@
+/**
+ * Interface describing a random-access readable buffer.
+ */
+export interface IReadableBuffer {
+    /**
+     * Reads a portion of the buffer starting at offset with the given size.
+     */
+    read(offset: number, size: number): Promise<Uint8Array | undefined>;
+    /**
+     * Checks if more data can be read starting at the given offset.
+     */
+    canReadMore(offset: number): Promise<boolean>;
+}
+/**
+ * Interface describing an append-only writable buffer.
+ */
+export interface IWritableBuffer {
+    /**
+     * Appends bytes to the buffer, advancing its internal write cursor when the
+     * operation succeeds.
+     */
+    appendBytes(data: Uint8Array): Promise<void>;
+    /**
+     * Returns whether the buffer can continue accepting writes. Implementations
+     * should flip this to `false` after a write would exceed capacity so callers
+     * can detect the overflow condition.
+     */
+    isValid(): Promise<boolean>;
+    /**
+     * Checks if the buffer can accept appending the given number of bytes.
+     */
+    canAppendMore(size: number): Promise<boolean>;
+}
+/**
+ * Convenience type for buffers capable of both read and write operations.
+ */
+export type IReadableAndWritableBuffer = IReadableBuffer & IWritableBuffer;

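These two interfaces are the seam between the serialization layer and storage: AvroFileParser consumes an IReadableBuffer and AvroFileWriter writes into an IWritableBuffer, so a custom sink only has to satisfy three methods. Below is a minimal, unbounded in-memory sketch; it is not the in_memory_buffer implementation shipped in this release, the import path is an assumption, and the toBytes() helper is purely illustrative.

```typescript
// Sketch: a minimal IWritableBuffer backed by a growing list of chunks.
// Not the package's own in-memory buffer; toBytes() is an illustrative extra.
import type { IWritableBuffer } from "@sachitv/avro-typescript"; // assumed re-export

class GrowableWritableBuffer implements IWritableBuffer {
  #chunks: Uint8Array[] = [];

  async appendBytes(data: Uint8Array): Promise<void> {
    // Copy so later mutation by the caller cannot corrupt stored data.
    this.#chunks.push(data.slice());
  }

  async isValid(): Promise<boolean> {
    return true; // unbounded, so writes never overflow
  }

  async canAppendMore(_size: number): Promise<boolean> {
    return true;
  }

  /** Concatenate everything appended so far into one Uint8Array. */
  toBytes(): Uint8Array {
    const total = this.#chunks.reduce((sum, chunk) => sum + chunk.length, 0);
    const out = new Uint8Array(total);
    let offset = 0;
    for (const chunk of this.#chunks) {
      out.set(chunk, offset);
      offset += chunk.length;
    }
    return out;
  }
}
```
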
package/esm/serialization/buffers/buffer.js
@@ -0,0 +1 @@
+export {};