@apibara/starknet 2.1.0-beta.4 → 2.1.0-beta.40
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.cjs +1655 -696
- package/dist/index.cjs.map +1 -0
- package/dist/index.d.cts +5178 -4389
- package/dist/index.d.mts +5178 -4389
- package/dist/index.d.ts +5178 -4389
- package/dist/index.mjs +1649 -691
- package/dist/index.mjs.map +1 -0
- package/dist/parser.cjs +59 -5
- package/dist/parser.cjs.map +1 -0
- package/dist/parser.d.cts +20 -13
- package/dist/parser.d.mts +20 -13
- package/dist/parser.d.ts +20 -13
- package/dist/parser.mjs +58 -6
- package/dist/parser.mjs.map +1 -0
- package/dist/shared/starknet.e649ecb1.d.cts +40 -0
- package/dist/shared/starknet.e649ecb1.d.mts +40 -0
- package/dist/shared/starknet.e649ecb1.d.ts +40 -0
- package/package.json +4 -6
- package/src/abi-wan-helpers.ts +181 -0
- package/src/abi.ts +6 -0
- package/src/block.ts +905 -423
- package/src/common.ts +20 -35
- package/src/event.ts +192 -44
- package/src/filter.ts +240 -239
- package/src/index.ts +3 -0
- package/src/parser.ts +102 -6
- package/src/proto/data.ts +1081 -1
- package/src/proto/filter.ts +76 -2
- package/dist/shared/starknet.2b19268a.d.cts +0 -32
- package/dist/shared/starknet.2b19268a.d.mts +0 -32
- package/dist/shared/starknet.2b19268a.d.ts +0 -32
- package/src/common.test.ts +0 -21
- package/src/filter.test.ts +0 -832
- package/src/helpers.ts +0 -8
- package/src/parser.test.ts +0 -169
package/dist/parser.cjs
CHANGED
@@ -37,7 +37,7 @@ function parseU256(data, offset) {
 function parseAsHex(data, offset) {
   assertInBounds(data, offset);
   return {
-    out:
+    out: data[offset],
     offset: offset + 1
   };
 }
@@ -53,7 +53,7 @@ function parseFelt252(data, offset) {
     offset: offset + 1
   };
 }
-function parseEmpty(
+function parseEmpty(_data, offset) {
   return { out: null, offset };
 }
 function parseArray(type) {
@@ -84,16 +84,35 @@ function parseStruct(parsers) {
   const sortedParsers = Object.entries(parsers).sort(
     (a, b) => a[1].index - b[1].index
   );
-
+  const parser = (data, startingOffset) => {
     let offset = startingOffset;
     const out = {};
-    for (const [key, { parser }] of sortedParsers) {
-      const { out: value, offset: newOffset } =
+    for (const [key, { parser: parser2 }] of sortedParsers) {
+      const { out: value, offset: newOffset } = parser2(data, offset);
      out[key] = value;
      offset = newOffset;
    }
    return { out, offset };
  };
+  return parser;
+}
+function parseEnum(parsers) {
+  return (data, startingOffset) => {
+    const selectorFelt = data[startingOffset];
+    const selector = Number(BigInt(selectorFelt));
+    const parserEntry = Object.entries(parsers).find(
+      ([, { index }]) => index === selector
+    );
+    if (!parserEntry) {
+      throw new ParseError(`Unknown enum variant selector: ${selector}`);
+    }
+    const [variantName, { parser }] = parserEntry;
+    const { out, offset: newOffset } = parser(data, startingOffset + 1);
+    return {
+      out: { _tag: variantName, [variantName]: out },
+      offset: newOffset
+    };
+  };
 }
 function parseTuple(...parsers) {
   return (data, startingOffset) => {
@@ -107,16 +126,50 @@ function parseTuple(...parsers) {
     return { out, offset };
   };
 }
+const parseByteArrayStruct = parseStruct({
+  data: {
+    index: 0,
+    parser: parseArray(parseBytes31)
+  },
+  pendingWord: { index: 1, parser: parseFelt252 },
+  pendingWordLen: { index: 2, parser: parseU32 }
+});
+function parseByteArray(data, offset) {
+  const { out, offset: offsetOut } = parseByteArrayStruct(data, offset);
+  const dataBytes = out.data.map((bytes2) => bytes2.slice(2).padStart(62, "0")).join("");
+  let pending = out.pendingWord.toString(16);
+  const pendingWordLength = Number(out.pendingWordLen);
+  if (pending.length < pendingWordLength * 2) {
+    pending = pending.padStart(pendingWordLength * 2, "0");
+  }
+  const pendingBytes = pending.slice(pending.length - 2 * pendingWordLength);
+  const bytes = removeLeadingZeros(dataBytes + pendingBytes);
+  return { out: `0x${bytes}`, offset: offsetOut };
+}
+function removeLeadingZeros(bytes) {
+  for (let i = 0; i < bytes.length; i++) {
+    if (bytes[i] !== "0") {
+      let j = i;
+      if (i % 2 !== 0) {
+        j -= 1;
+      }
+      return bytes.slice(j);
+    }
+  }
+  return "00";
+}

 exports.ParseError = ParseError;
 exports.parseArray = parseArray;
 exports.parseAsBigInt = parseAsBigInt;
 exports.parseAsHex = parseAsHex;
 exports.parseBool = parseBool;
+exports.parseByteArray = parseByteArray;
 exports.parseBytes31 = parseBytes31;
 exports.parseClassHash = parseClassHash;
 exports.parseContractAddress = parseContractAddress;
 exports.parseEmpty = parseEmpty;
+exports.parseEnum = parseEnum;
 exports.parseEthAddress = parseEthAddress;
 exports.parseFelt252 = parseFelt252;
 exports.parseOption = parseOption;
@@ -131,3 +184,4 @@ exports.parseU32 = parseU32;
 exports.parseU64 = parseU64;
 exports.parseU8 = parseU8;
 exports.parseUsize = parseUsize;
+//# sourceMappingURL=parser.cjs.map
package/dist/parser.cjs.map
ADDED
@@ -0,0 +1 @@
+
{"version":3,"file":"parser.cjs","sources":["../src/parser.ts"],"sourcesContent":["/*\n * Calldata combinatorial parsers.\n *\n * Based on the Ekubo's event parser.\n *\n * MIT License\n *\n * Copyright (c) 2023 Ekubo, Inc.\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\nimport type { FieldElement } from \"./common\";\n\nexport type Parser<TOut> = (\n data: readonly FieldElement[],\n offset: number,\n) => { out: TOut; offset: number };\n\nexport class ParseError extends Error {\n constructor(message: string) {\n super(message);\n this.name = \"ParseError\";\n }\n}\n\n// Primitive types.\n\nfunction assertInBounds(data: readonly FieldElement[], offset: number) {\n if (offset >= data.length) {\n throw new ParseError(\n `Offset out of bounds. 
Data length ${data.length}, offset ${offset}`,\n );\n }\n}\n\nexport function parseBool(data: readonly FieldElement[], offset: number) {\n assertInBounds(data, offset);\n return { out: BigInt(data[offset]) > 0n, offset: offset + 1 };\n}\n\nexport function parseAsBigInt(data: readonly FieldElement[], offset: number) {\n assertInBounds(data, offset);\n return { out: BigInt(data[offset]), offset: offset + 1 };\n}\n\nexport const parseU8 = parseAsBigInt;\nexport const parseU16 = parseAsBigInt;\nexport const parseU32 = parseAsBigInt;\nexport const parseU64 = parseAsBigInt;\nexport const parseU128 = parseAsBigInt;\nexport const parseUsize = parseAsBigInt;\n\nexport function parseU256(data: readonly FieldElement[], offset: number) {\n assertInBounds(data, offset + 1);\n return {\n out: BigInt(data[offset]) + (BigInt(data[offset + 1]) << 128n),\n offset: offset + 2,\n };\n}\n\nexport function parseAsHex(data: readonly FieldElement[], offset: number) {\n assertInBounds(data, offset);\n return {\n out: data[offset],\n offset: offset + 1,\n };\n}\n\nexport const parseContractAddress = parseAsHex;\nexport const parseEthAddress = parseAsHex;\nexport const parseStorageAddress = parseAsHex;\nexport const parseClassHash = parseAsHex;\nexport const parseBytes31 = parseAsHex;\n\nexport function parseFelt252(data: readonly FieldElement[], offset: number) {\n assertInBounds(data, offset);\n return {\n out: BigInt(data[offset]),\n offset: offset + 1,\n };\n}\n\nexport function parseEmpty(_data: readonly FieldElement[], offset: number) {\n return { out: null, offset };\n}\n\n// Higher-level types.\n\nexport function parseArray<T>(type: Parser<T>): Parser<T[]> {\n return (data: readonly FieldElement[], startingOffset: number) => {\n let offset = startingOffset;\n const length = BigInt(data[offset]);\n\n offset++;\n\n const out: T[] = [];\n for (let i = 0; i < length; i++) {\n const { out: item, offset: newOffset } = type(data, offset);\n out.push(item);\n offset = newOffset;\n }\n\n return { out, offset };\n };\n}\n\nexport const parseSpan = parseArray;\n\nexport function parseOption<T>(type: Parser<T>) {\n return (data: readonly FieldElement[], offset: number) => {\n const hasValue = BigInt(data[offset]) === 1n;\n if (hasValue) {\n return type(data, offset + 1);\n }\n return { out: null, offset: offset + 1 };\n };\n}\n\nexport function parseStruct<T extends Record<string, unknown>>(\n parsers: {\n [K in keyof T]: { index: number; parser: Parser<T[K]> };\n },\n): Parser<{ [K in keyof T]: T[K] }> {\n const sortedParsers = Object.entries(parsers).sort(\n (a, b) => a[1].index - b[1].index,\n );\n const parser = (data: readonly FieldElement[], startingOffset: number) => {\n let offset = startingOffset;\n const out: Record<string, unknown> = {};\n for (const [key, { parser }] of sortedParsers) {\n const { out: value, offset: newOffset } = parser(data, offset);\n out[key] = value;\n offset = newOffset;\n }\n return { out, offset };\n };\n return parser as Parser<{ [K in keyof T]: T[K] }>;\n}\n\nexport function parseEnum<T extends Record<string, unknown>>(\n parsers: {\n [K in keyof T]: { index: number; parser: Parser<T[K]> };\n },\n): Parser<T[keyof T]> {\n return (data: readonly FieldElement[], startingOffset: number) => {\n const selectorFelt = data[startingOffset];\n const selector = Number(BigInt(selectorFelt));\n\n // Find the parser by index\n const parserEntry = Object.entries(parsers).find(\n ([, { index }]) => index === selector,\n );\n\n if (!parserEntry) {\n throw new ParseError(`Unknown enum variant 
selector: ${selector}`);\n }\n\n const [variantName, { parser }] = parserEntry;\n const { out, offset: newOffset } = parser(data, startingOffset + 1);\n\n return {\n out: { _tag: variantName, [variantName]: out } as T[keyof T],\n offset: newOffset,\n };\n };\n}\n\nexport function parseTuple<T extends Parser<unknown>[]>(\n ...parsers: T\n): Parser<UnwrapParsers<T>> {\n return (data: readonly FieldElement[], startingOffset: number) => {\n let offset = startingOffset;\n const out = [];\n for (const parser of parsers) {\n const { out: value, offset: newOffset } = parser(data, offset);\n out.push(value);\n offset = newOffset;\n }\n return { out, offset } as { out: UnwrapParsers<T>; offset: number };\n };\n}\n\ntype UnwrapParsers<TP> = {\n [Index in keyof TP]: TP[Index] extends Parser<infer U> ? U : never;\n};\n\nconst parseByteArrayStruct = parseStruct({\n data: {\n index: 0,\n parser: parseArray(parseBytes31),\n },\n pendingWord: { index: 1, parser: parseFelt252 },\n pendingWordLen: { index: 2, parser: parseU32 },\n});\n\nexport function parseByteArray(data: readonly FieldElement[], offset: number) {\n // A ByteArray is a struct with the following abi:\n //\n // {\n // name: \"core::byte_array::ByteArray\",\n // type: \"struct\",\n // members: [\n // {\n // name: \"data\",\n // type: \"core::array::Array::<core::bytes_31::bytes31>\",\n // },\n // {\n // name: \"pending_word\",\n // type: \"core::felt252\",\n // },\n // {\n // name: \"pending_word_len\",\n // type: \"core::integer::u32\",\n // },\n // ],\n // },\n //\n // We first parse it using a parser for that struct, then convert it to the output `0x${string}` type.\n const { out, offset: offsetOut } = parseByteArrayStruct(data, offset);\n\n // Remove 0x prefix from data elements and pad them to 31 bytes.\n const dataBytes = out.data\n .map((bytes) => bytes.slice(2).padStart(62, \"0\"))\n .join(\"\");\n\n let pending = out.pendingWord.toString(16);\n const pendingWordLength = Number(out.pendingWordLen);\n if (pending.length < pendingWordLength * 2) {\n pending = pending.padStart(pendingWordLength * 2, \"0\");\n }\n\n const pendingBytes = pending.slice(pending.length - 2 * pendingWordLength);\n const bytes = removeLeadingZeros(dataBytes + pendingBytes);\n\n return { out: `0x${bytes}`, offset: offsetOut };\n}\n\nfunction removeLeadingZeros(bytes: string): string {\n for (let i = 0; i < bytes.length; i++) {\n if (bytes[i] !== \"0\") {\n let j = i;\n if (i % 2 !== 0) {\n j -= 1;\n }\n return bytes.slice(j);\n }\n }\n // The bytes are all 0, so return something reasonable.\n return 
\"00\";\n}\n"],"names":["parser","bytes"],"mappings":";;AAkCO,MAAM,mBAAmB,KAAM,CAAA;AAAA,EACpC,YAAY,OAAiB,EAAA;AAC3B,IAAA,KAAA,CAAM,OAAO,CAAA,CAAA;AACb,IAAA,IAAA,CAAK,IAAO,GAAA,YAAA,CAAA;AAAA,GACd;AACF,CAAA;AAIA,SAAS,cAAA,CAAe,MAA+B,MAAgB,EAAA;AACrE,EAAI,IAAA,MAAA,IAAU,KAAK,MAAQ,EAAA;AACzB,IAAA,MAAM,IAAI,UAAA;AAAA,MACR,CAAqC,kCAAA,EAAA,IAAA,CAAK,MAAM,CAAA,SAAA,EAAY,MAAM,CAAA,CAAA;AAAA,KACpE,CAAA;AAAA,GACF;AACF,CAAA;AAEgB,SAAA,SAAA,CAAU,MAA+B,MAAgB,EAAA;AACvE,EAAA,cAAA,CAAe,MAAM,MAAM,CAAA,CAAA;AAC3B,EAAO,OAAA,EAAE,GAAK,EAAA,MAAA,CAAO,IAAK,CAAA,MAAM,CAAC,CAAI,GAAA,EAAA,EAAI,MAAQ,EAAA,MAAA,GAAS,CAAE,EAAA,CAAA;AAC9D,CAAA;AAEgB,SAAA,aAAA,CAAc,MAA+B,MAAgB,EAAA;AAC3E,EAAA,cAAA,CAAe,MAAM,MAAM,CAAA,CAAA;AAC3B,EAAO,OAAA,EAAE,KAAK,MAAO,CAAA,IAAA,CAAK,MAAM,CAAC,CAAA,EAAG,MAAQ,EAAA,MAAA,GAAS,CAAE,EAAA,CAAA;AACzD,CAAA;AAEO,MAAM,OAAU,GAAA,cAAA;AAChB,MAAM,QAAW,GAAA,cAAA;AACjB,MAAM,QAAW,GAAA,cAAA;AACjB,MAAM,QAAW,GAAA,cAAA;AACjB,MAAM,SAAY,GAAA,cAAA;AAClB,MAAM,UAAa,GAAA,cAAA;AAEV,SAAA,SAAA,CAAU,MAA+B,MAAgB,EAAA;AACvE,EAAe,cAAA,CAAA,IAAA,EAAM,SAAS,CAAC,CAAA,CAAA;AAC/B,EAAO,OAAA;AAAA,IACL,GAAA,EAAK,MAAO,CAAA,IAAA,CAAK,MAAM,CAAC,CAAK,IAAA,MAAA,CAAO,IAAK,CAAA,MAAA,GAAS,CAAC,CAAC,CAAK,IAAA,IAAA,CAAA;AAAA,IACzD,QAAQ,MAAS,GAAA,CAAA;AAAA,GACnB,CAAA;AACF,CAAA;AAEgB,SAAA,UAAA,CAAW,MAA+B,MAAgB,EAAA;AACxE,EAAA,cAAA,CAAe,MAAM,MAAM,CAAA,CAAA;AAC3B,EAAO,OAAA;AAAA,IACL,GAAA,EAAK,KAAK,MAAM,CAAA;AAAA,IAChB,QAAQ,MAAS,GAAA,CAAA;AAAA,GACnB,CAAA;AACF,CAAA;AAEO,MAAM,oBAAuB,GAAA,WAAA;AAC7B,MAAM,eAAkB,GAAA,WAAA;AACxB,MAAM,mBAAsB,GAAA,WAAA;AAC5B,MAAM,cAAiB,GAAA,WAAA;AACvB,MAAM,YAAe,GAAA,WAAA;AAEZ,SAAA,YAAA,CAAa,MAA+B,MAAgB,EAAA;AAC1E,EAAA,cAAA,CAAe,MAAM,MAAM,CAAA,CAAA;AAC3B,EAAO,OAAA;AAAA,IACL,GAAK,EAAA,MAAA,CAAO,IAAK,CAAA,MAAM,CAAC,CAAA;AAAA,IACxB,QAAQ,MAAS,GAAA,CAAA;AAAA,GACnB,CAAA;AACF,CAAA;AAEgB,SAAA,UAAA,CAAW,OAAgC,MAAgB,EAAA;AACzE,EAAO,OAAA,EAAE,GAAK,EAAA,IAAA,EAAM,MAAO,EAAA,CAAA;AAC7B,CAAA;AAIO,SAAS,WAAc,IAA8B,EAAA;AAC1D,EAAO,OAAA,CAAC,MAA+B,cAA2B,KAAA;AAChE,IAAA,IAAI,MAAS,GAAA,cAAA,CAAA;AACb,IAAA,MAAM,MAAS,GAAA,MAAA,CAAO,IAAK,CAAA,MAAM,CAAC,CAAA,CAAA;AAElC,IAAA,MAAA,EAAA,CAAA;AAEA,IAAA,MAAM,MAAW,EAAC,CAAA;AAClB,IAAA,KAAA,IAAS,CAAI,GAAA,CAAA,EAAG,CAAI,GAAA,MAAA,EAAQ,CAAK,EAAA,EAAA;AAC/B,MAAM,MAAA,EAAE,KAAK,IAAM,EAAA,MAAA,EAAQ,WAAc,GAAA,IAAA,CAAK,MAAM,MAAM,CAAA,CAAA;AAC1D,MAAA,GAAA,CAAI,KAAK,IAAI,CAAA,CAAA;AACb,MAAS,MAAA,GAAA,SAAA,CAAA;AAAA,KACX;AAEA,IAAO,OAAA,EAAE,KAAK,MAAO,EAAA,CAAA;AAAA,GACvB,CAAA;AACF,CAAA;AAEO,MAAM,SAAY,GAAA,WAAA;AAElB,SAAS,YAAe,IAAiB,EAAA;AAC9C,EAAO,OAAA,CAAC,MAA+B,MAAmB,KAAA;AACxD,IAAA,MAAM,QAAW,GAAA,MAAA,CAAO,IAAK,CAAA,MAAM,CAAC,CAAM,KAAA,EAAA,CAAA;AAC1C,IAAA,IAAI,QAAU,EAAA;AACZ,MAAO,OAAA,IAAA,CAAK,IAAM,EAAA,MAAA,GAAS,CAAC,CAAA,CAAA;AAAA,KAC9B;AACA,IAAA,OAAO,EAAE,GAAA,EAAK,IAAM,EAAA,MAAA,EAAQ,SAAS,CAAE,EAAA,CAAA;AAAA,GACzC,CAAA;AACF,CAAA;AAEO,SAAS,YACd,OAGkC,EAAA;AAClC,EAAA,MAAM,aAAgB,GAAA,MAAA,CAAO,OAAQ,CAAA,OAAO,CAAE,CAAA,IAAA;AAAA,IAC5C,CAAC,GAAG,CAAM,KAAA,CAAA,CAAE,CAAC,CAAE,CAAA,KAAA,GAAQ,CAAE,CAAA,CAAC,CAAE,CAAA,KAAA;AAAA,GAC9B,CAAA;AACA,EAAM,MAAA,MAAA,GAAS,CAAC,IAAA,EAA+B,cAA2B,KAAA;AACxE,IAAA,IAAI,MAAS,GAAA,cAAA,CAAA;AACb,IAAA,MAAM,MAA+B,EAAC,CAAA;AACtC,IAAA,KAAA,MAAW,CAAC,GAAK,EAAA,EAAE,QAAAA,OAAO,EAAC,KAAK,aAAe,EAAA;AAC7C,MAAM,MAAA,EAAE,KAAK,KAAO,EAAA,MAAA,EAAQ,WAAcA,GAAAA,OAAAA,CAAO,MAAM,MAAM,CAAA,CAAA;AAC7D,MAAA,GAAA,CAAI,GAAG,CAAI,GAAA,KAAA,CAAA;AACX,MAAS,MAAA,GAAA,SAAA,CAAA;AAAA,KACX;AACA,IAAO,OAAA,EAAE,KAAK,MAAO,EAAA,CAAA;AAAA,GACvB,CAAA;AACA,EAAO,OAAA,MAAA,CAAA;AACT,CAAA;AAEO,SAAS,UACd,OAGoB,EAAA;AACpB,EAAO,OAAA,CAAC,MAA+B,cAA2B,KAAA;AAChE,IAAM,MAAA,YAAA,GAAe,KAAK,cAAc,CAAA,CAAA;AACxC,IAA
A,MAAM,QAAW,GAAA,MAAA,CAAO,MAAO,CAAA,YAAY,CAAC,CAAA,CAAA;AAG5C,IAAA,MAAM,WAAc,GAAA,MAAA,CAAO,OAAQ,CAAA,OAAO,CAAE,CAAA,IAAA;AAAA,MAC1C,CAAC,GAAG,EAAE,KAAM,EAAC,MAAM,KAAU,KAAA,QAAA;AAAA,KAC/B,CAAA;AAEA,IAAA,IAAI,CAAC,WAAa,EAAA;AAChB,MAAA,MAAM,IAAI,UAAA,CAAW,CAAkC,+BAAA,EAAA,QAAQ,CAAE,CAAA,CAAA,CAAA;AAAA,KACnE;AAEA,IAAA,MAAM,CAAC,WAAA,EAAa,EAAE,MAAA,EAAQ,CAAI,GAAA,WAAA,CAAA;AAClC,IAAM,MAAA,EAAE,KAAK,MAAQ,EAAA,SAAA,KAAc,MAAO,CAAA,IAAA,EAAM,iBAAiB,CAAC,CAAA,CAAA;AAElE,IAAO,OAAA;AAAA,MACL,KAAK,EAAE,IAAA,EAAM,aAAa,CAAC,WAAW,GAAG,GAAI,EAAA;AAAA,MAC7C,MAAQ,EAAA,SAAA;AAAA,KACV,CAAA;AAAA,GACF,CAAA;AACF,CAAA;AAEO,SAAS,cACX,OACuB,EAAA;AAC1B,EAAO,OAAA,CAAC,MAA+B,cAA2B,KAAA;AAChE,IAAA,IAAI,MAAS,GAAA,cAAA,CAAA;AACb,IAAA,MAAM,MAAM,EAAC,CAAA;AACb,IAAA,KAAA,MAAW,UAAU,OAAS,EAAA;AAC5B,MAAM,MAAA,EAAE,KAAK,KAAO,EAAA,MAAA,EAAQ,WAAc,GAAA,MAAA,CAAO,MAAM,MAAM,CAAA,CAAA;AAC7D,MAAA,GAAA,CAAI,KAAK,KAAK,CAAA,CAAA;AACd,MAAS,MAAA,GAAA,SAAA,CAAA;AAAA,KACX;AACA,IAAO,OAAA,EAAE,KAAK,MAAO,EAAA,CAAA;AAAA,GACvB,CAAA;AACF,CAAA;AAMA,MAAM,uBAAuB,WAAY,CAAA;AAAA,EACvC,IAAM,EAAA;AAAA,IACJ,KAAO,EAAA,CAAA;AAAA,IACP,MAAA,EAAQ,WAAW,YAAY,CAAA;AAAA,GACjC;AAAA,EACA,WAAa,EAAA,EAAE,KAAO,EAAA,CAAA,EAAG,QAAQ,YAAa,EAAA;AAAA,EAC9C,cAAgB,EAAA,EAAE,KAAO,EAAA,CAAA,EAAG,QAAQ,QAAS,EAAA;AAC/C,CAAC,CAAA,CAAA;AAEe,SAAA,cAAA,CAAe,MAA+B,MAAgB,EAAA;AAuB5E,EAAA,MAAM,EAAE,GAAK,EAAA,MAAA,EAAQ,WAAc,GAAA,oBAAA,CAAqB,MAAM,MAAM,CAAA,CAAA;AAGpE,EAAA,MAAM,YAAY,GAAI,CAAA,IAAA,CACnB,GAAI,CAAA,CAACC,WAAUA,MAAM,CAAA,KAAA,CAAM,CAAC,CAAA,CAAE,SAAS,EAAI,EAAA,GAAG,CAAC,CAAA,CAC/C,KAAK,EAAE,CAAA,CAAA;AAEV,EAAA,IAAI,OAAU,GAAA,GAAA,CAAI,WAAY,CAAA,QAAA,CAAS,EAAE,CAAA,CAAA;AACzC,EAAM,MAAA,iBAAA,GAAoB,MAAO,CAAA,GAAA,CAAI,cAAc,CAAA,CAAA;AACnD,EAAI,IAAA,OAAA,CAAQ,MAAS,GAAA,iBAAA,GAAoB,CAAG,EAAA;AAC1C,IAAA,OAAA,GAAU,OAAQ,CAAA,QAAA,CAAS,iBAAoB,GAAA,CAAA,EAAG,GAAG,CAAA,CAAA;AAAA,GACvD;AAEA,EAAA,MAAM,eAAe,OAAQ,CAAA,KAAA,CAAM,OAAQ,CAAA,MAAA,GAAS,IAAI,iBAAiB,CAAA,CAAA;AACzE,EAAM,MAAA,KAAA,GAAQ,kBAAmB,CAAA,SAAA,GAAY,YAAY,CAAA,CAAA;AAEzD,EAAA,OAAO,EAAE,GAAK,EAAA,CAAA,EAAA,EAAK,KAAK,CAAA,CAAA,EAAI,QAAQ,SAAU,EAAA,CAAA;AAChD,CAAA;AAEA,SAAS,mBAAmB,KAAuB,EAAA;AACjD,EAAA,KAAA,IAAS,CAAI,GAAA,CAAA,EAAG,CAAI,GAAA,KAAA,CAAM,QAAQ,CAAK,EAAA,EAAA;AACrC,IAAI,IAAA,KAAA,CAAM,CAAC,CAAA,KAAM,GAAK,EAAA;AACpB,MAAA,IAAI,CAAI,GAAA,CAAA,CAAA;AACR,MAAI,IAAA,CAAA,GAAI,MAAM,CAAG,EAAA;AACf,QAAK,CAAA,IAAA,CAAA,CAAA;AAAA,OACP;AACA,MAAO,OAAA,KAAA,CAAM,MAAM,CAAC,CAAA,CAAA;AAAA,KACtB;AAAA,GACF;AAEA,EAAO,OAAA,IAAA,CAAA;AACT;;;;;;;;;;;;;;;;;;;;;;;;;;;;"}
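The new parseEnum combinator added above reads the variant selector felt, dispatches to the matching variant parser, and tags the result with _tag. A minimal usage sketch in TypeScript follows; the Action enum and its variant layout are hypothetical, and the "@apibara/starknet/parser" import path assumes the package exposes the parser entry point built into dist/.

// Hypothetical Cairo enum: enum Action { Deposit: u256, Withdraw: u256, Skip: () }
import { parseEmpty, parseEnum, parseU256 } from "@apibara/starknet/parser";

const parseAction = parseEnum({
  Deposit: { index: 0, parser: parseU256 },
  Withdraw: { index: 1, parser: parseU256 },
  Skip: { index: 2, parser: parseEmpty },
});

// Calldata: selector 1 (Withdraw) followed by a u256 as (low, high) limbs.
const data = ["0x1", "0x64", "0x0"] as const;
const { out, offset } = parseAction(data, 0);
// out is { _tag: "Withdraw", Withdraw: 100n } at runtime; offset is 3.

An unknown selector throws a ParseError ("Unknown enum variant selector: ..."), matching the branch added in the diff.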
package/dist/parser.d.cts
CHANGED
@@ -1,6 +1,6 @@
-import {
-import '@
-import '
+import { a as FieldElement } from './shared/starknet.e649ecb1.cjs';
+import '@apibara/protocol/codec';
+import 'protobufjs/minimal.js';

 type Parser<TOut> = (data: readonly FieldElement[], offset: number) => {
     out: TOut;
@@ -28,7 +28,7 @@ declare function parseU256(data: readonly FieldElement[], offset: number): {
     offset: number;
 };
 declare function parseAsHex(data: readonly FieldElement[], offset: number): {
-    out: string
+    out: `0x${string}`;
     offset: number;
 };
 declare const parseContractAddress: typeof parseAsHex;
@@ -40,7 +40,7 @@ declare function parseFelt252(data: readonly FieldElement[], offset: number): {
     out: bigint;
     offset: number;
 };
-declare function parseEmpty(
+declare function parseEmpty(_data: readonly FieldElement[], offset: number): {
     out: null;
     offset: number;
 };
@@ -53,20 +53,27 @@ declare function parseOption<T>(type: Parser<T>): (data: readonly FieldElement[]
     out: null;
     offset: number;
 };
-declare function parseStruct<T extends {
-    [key: string]: unknown;
-}>(parsers: {
+declare function parseStruct<T extends Record<string, unknown>>(parsers: {
     [K in keyof T]: {
         index: number;
         parser: Parser<T[K]>;
     };
-}):
-
-
-
+}): Parser<{
+    [K in keyof T]: T[K];
+}>;
+declare function parseEnum<T extends Record<string, unknown>>(parsers: {
+    [K in keyof T]: {
+        index: number;
+        parser: Parser<T[K]>;
+    };
+}): Parser<T[keyof T]>;
 declare function parseTuple<T extends Parser<unknown>[]>(...parsers: T): Parser<UnwrapParsers<T>>;
 type UnwrapParsers<TP> = {
     [Index in keyof TP]: TP[Index] extends Parser<infer U> ? U : never;
 };
+declare function parseByteArray(data: readonly FieldElement[], offset: number): {
+    out: string;
+    offset: number;
+};

-export { ParseError, type Parser, parseArray, parseAsBigInt, parseAsHex, parseBool, parseBytes31, parseClassHash, parseContractAddress, parseEmpty, parseEthAddress, parseFelt252, parseOption, parseSpan, parseStorageAddress, parseStruct, parseTuple, parseU128, parseU16, parseU256, parseU32, parseU64, parseU8, parseUsize };
+export { ParseError, type Parser, parseArray, parseAsBigInt, parseAsHex, parseBool, parseByteArray, parseBytes31, parseClassHash, parseContractAddress, parseEmpty, parseEnum, parseEthAddress, parseFelt252, parseOption, parseSpan, parseStorageAddress, parseStruct, parseTuple, parseU128, parseU16, parseU256, parseU32, parseU64, parseU8, parseUsize };
package/dist/parser.d.mts
CHANGED
@@ -1,6 +1,6 @@
-import {
-import '@
-import '
+import { a as FieldElement } from './shared/starknet.e649ecb1.mjs';
+import '@apibara/protocol/codec';
+import 'protobufjs/minimal.js';

 type Parser<TOut> = (data: readonly FieldElement[], offset: number) => {
     out: TOut;
@@ -28,7 +28,7 @@ declare function parseU256(data: readonly FieldElement[], offset: number): {
     offset: number;
 };
 declare function parseAsHex(data: readonly FieldElement[], offset: number): {
-    out: string
+    out: `0x${string}`;
     offset: number;
 };
 declare const parseContractAddress: typeof parseAsHex;
@@ -40,7 +40,7 @@ declare function parseFelt252(data: readonly FieldElement[], offset: number): {
     out: bigint;
     offset: number;
 };
-declare function parseEmpty(
+declare function parseEmpty(_data: readonly FieldElement[], offset: number): {
     out: null;
     offset: number;
 };
@@ -53,20 +53,27 @@ declare function parseOption<T>(type: Parser<T>): (data: readonly FieldElement[]
     out: null;
     offset: number;
 };
-declare function parseStruct<T extends {
-    [key: string]: unknown;
-}>(parsers: {
+declare function parseStruct<T extends Record<string, unknown>>(parsers: {
     [K in keyof T]: {
         index: number;
         parser: Parser<T[K]>;
     };
-}):
-
-
-
+}): Parser<{
+    [K in keyof T]: T[K];
+}>;
+declare function parseEnum<T extends Record<string, unknown>>(parsers: {
+    [K in keyof T]: {
+        index: number;
+        parser: Parser<T[K]>;
+    };
+}): Parser<T[keyof T]>;
 declare function parseTuple<T extends Parser<unknown>[]>(...parsers: T): Parser<UnwrapParsers<T>>;
 type UnwrapParsers<TP> = {
     [Index in keyof TP]: TP[Index] extends Parser<infer U> ? U : never;
 };
+declare function parseByteArray(data: readonly FieldElement[], offset: number): {
+    out: string;
+    offset: number;
+};

-export { ParseError, type Parser, parseArray, parseAsBigInt, parseAsHex, parseBool, parseBytes31, parseClassHash, parseContractAddress, parseEmpty, parseEthAddress, parseFelt252, parseOption, parseSpan, parseStorageAddress, parseStruct, parseTuple, parseU128, parseU16, parseU256, parseU32, parseU64, parseU8, parseUsize };
+export { ParseError, type Parser, parseArray, parseAsBigInt, parseAsHex, parseBool, parseByteArray, parseBytes31, parseClassHash, parseContractAddress, parseEmpty, parseEnum, parseEthAddress, parseFelt252, parseOption, parseSpan, parseStorageAddress, parseStruct, parseTuple, parseU128, parseU16, parseU256, parseU32, parseU64, parseU8, parseUsize };
package/dist/parser.d.ts
CHANGED
@@ -1,6 +1,6 @@
-import {
-import '@
-import '
+import { a as FieldElement } from './shared/starknet.e649ecb1.js';
+import '@apibara/protocol/codec';
+import 'protobufjs/minimal.js';

 type Parser<TOut> = (data: readonly FieldElement[], offset: number) => {
     out: TOut;
@@ -28,7 +28,7 @@ declare function parseU256(data: readonly FieldElement[], offset: number): {
     offset: number;
 };
 declare function parseAsHex(data: readonly FieldElement[], offset: number): {
-    out: string
+    out: `0x${string}`;
     offset: number;
 };
 declare const parseContractAddress: typeof parseAsHex;
@@ -40,7 +40,7 @@ declare function parseFelt252(data: readonly FieldElement[], offset: number): {
     out: bigint;
     offset: number;
 };
-declare function parseEmpty(
+declare function parseEmpty(_data: readonly FieldElement[], offset: number): {
     out: null;
     offset: number;
 };
@@ -53,20 +53,27 @@ declare function parseOption<T>(type: Parser<T>): (data: readonly FieldElement[]
     out: null;
     offset: number;
 };
-declare function parseStruct<T extends {
-    [key: string]: unknown;
-}>(parsers: {
+declare function parseStruct<T extends Record<string, unknown>>(parsers: {
     [K in keyof T]: {
         index: number;
         parser: Parser<T[K]>;
     };
-}):
-
-
-
+}): Parser<{
+    [K in keyof T]: T[K];
+}>;
+declare function parseEnum<T extends Record<string, unknown>>(parsers: {
+    [K in keyof T]: {
+        index: number;
+        parser: Parser<T[K]>;
+    };
+}): Parser<T[keyof T]>;
 declare function parseTuple<T extends Parser<unknown>[]>(...parsers: T): Parser<UnwrapParsers<T>>;
 type UnwrapParsers<TP> = {
     [Index in keyof TP]: TP[Index] extends Parser<infer U> ? U : never;
 };
+declare function parseByteArray(data: readonly FieldElement[], offset: number): {
+    out: string;
+    offset: number;
+};

-export { ParseError, type Parser, parseArray, parseAsBigInt, parseAsHex, parseBool, parseBytes31, parseClassHash, parseContractAddress, parseEmpty, parseEthAddress, parseFelt252, parseOption, parseSpan, parseStorageAddress, parseStruct, parseTuple, parseU128, parseU16, parseU256, parseU32, parseU64, parseU8, parseUsize };
+export { ParseError, type Parser, parseArray, parseAsBigInt, parseAsHex, parseBool, parseByteArray, parseBytes31, parseClassHash, parseContractAddress, parseEmpty, parseEnum, parseEthAddress, parseFelt252, parseOption, parseSpan, parseStorageAddress, parseStruct, parseTuple, parseU128, parseU16, parseU256, parseU32, parseU64, parseU8, parseUsize };
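With the tightened declarations above, parseStruct is constrained to Record<string, unknown> and returns a fully typed Parser, and the address parsers now produce the template-literal type `0x${string}` instead of a bare string, so output field types can be inferred without casts. A sketch in TypeScript; the Transfer field layout is hypothetical and the import path carries the same assumption as the earlier example.

import {
  parseContractAddress,
  parseStruct,
  parseU256,
} from "@apibara/starknet/parser";

// Hypothetical ERC-20 Transfer payload: (from, to, value).
const parseTransfer = parseStruct({
  from: { index: 0, parser: parseContractAddress },
  to: { index: 1, parser: parseContractAddress },
  value: { index: 2, parser: parseU256 },
});

// Inferred as { from: `0x${string}`; to: `0x${string}`; value: bigint }
type Transfer = ReturnType<typeof parseTransfer>["out"];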
package/dist/parser.mjs
CHANGED
@@ -35,7 +35,7 @@ function parseU256(data, offset) {
 function parseAsHex(data, offset) {
   assertInBounds(data, offset);
   return {
-    out:
+    out: data[offset],
     offset: offset + 1
   };
 }
@@ -51,7 +51,7 @@ function parseFelt252(data, offset) {
     offset: offset + 1
   };
 }
-function parseEmpty(
+function parseEmpty(_data, offset) {
   return { out: null, offset };
 }
 function parseArray(type) {
@@ -82,16 +82,35 @@ function parseStruct(parsers) {
   const sortedParsers = Object.entries(parsers).sort(
     (a, b) => a[1].index - b[1].index
   );
-
+  const parser = (data, startingOffset) => {
     let offset = startingOffset;
     const out = {};
-    for (const [key, { parser }] of sortedParsers) {
-      const { out: value, offset: newOffset } =
+    for (const [key, { parser: parser2 }] of sortedParsers) {
+      const { out: value, offset: newOffset } = parser2(data, offset);
      out[key] = value;
      offset = newOffset;
    }
    return { out, offset };
  };
+  return parser;
+}
+function parseEnum(parsers) {
+  return (data, startingOffset) => {
+    const selectorFelt = data[startingOffset];
+    const selector = Number(BigInt(selectorFelt));
+    const parserEntry = Object.entries(parsers).find(
+      ([, { index }]) => index === selector
+    );
+    if (!parserEntry) {
+      throw new ParseError(`Unknown enum variant selector: ${selector}`);
+    }
+    const [variantName, { parser }] = parserEntry;
+    const { out, offset: newOffset } = parser(data, startingOffset + 1);
+    return {
+      out: { _tag: variantName, [variantName]: out },
+      offset: newOffset
+    };
+  };
 }
 function parseTuple(...parsers) {
   return (data, startingOffset) => {
@@ -105,5 +124,38 @@ function parseTuple(...parsers) {
     return { out, offset };
   };
 }
+const parseByteArrayStruct = parseStruct({
+  data: {
+    index: 0,
+    parser: parseArray(parseBytes31)
+  },
+  pendingWord: { index: 1, parser: parseFelt252 },
+  pendingWordLen: { index: 2, parser: parseU32 }
+});
+function parseByteArray(data, offset) {
+  const { out, offset: offsetOut } = parseByteArrayStruct(data, offset);
+  const dataBytes = out.data.map((bytes2) => bytes2.slice(2).padStart(62, "0")).join("");
+  let pending = out.pendingWord.toString(16);
+  const pendingWordLength = Number(out.pendingWordLen);
+  if (pending.length < pendingWordLength * 2) {
+    pending = pending.padStart(pendingWordLength * 2, "0");
+  }
+  const pendingBytes = pending.slice(pending.length - 2 * pendingWordLength);
+  const bytes = removeLeadingZeros(dataBytes + pendingBytes);
+  return { out: `0x${bytes}`, offset: offsetOut };
+}
+function removeLeadingZeros(bytes) {
+  for (let i = 0; i < bytes.length; i++) {
+    if (bytes[i] !== "0") {
+      let j = i;
+      if (i % 2 !== 0) {
+        j -= 1;
+      }
+      return bytes.slice(j);
+    }
+  }
+  return "00";
+}

-export { ParseError, parseArray, parseAsBigInt, parseAsHex, parseBool, parseBytes31, parseClassHash, parseContractAddress, parseEmpty, parseEthAddress, parseFelt252, parseOption, parseSpan, parseStorageAddress, parseStruct, parseTuple, parseU128, parseU16, parseU256, parseU32, parseU64, parseU8, parseUsize };
+export { ParseError, parseArray, parseAsBigInt, parseAsHex, parseBool, parseByteArray, parseBytes31, parseClassHash, parseContractAddress, parseEmpty, parseEnum, parseEthAddress, parseFelt252, parseOption, parseSpan, parseStorageAddress, parseStruct, parseTuple, parseU128, parseU16, parseU256, parseU32, parseU64, parseU8, parseUsize };
+//# sourceMappingURL=parser.mjs.map
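parseByteArray, added above, first runs the internal parseByteArrayStruct parser (data: bytes31[], pendingWord, pendingWordLen) and then repacks the chunks into a single 0x-prefixed hex string. A worked sketch in TypeScript of what that conversion does, with the same import-path assumption as the earlier examples:

import { parseByteArray } from "@apibara/starknet/parser";

// Cairo serializes ByteArray as [data_len, ...data, pending_word, pending_word_len].
// "hello" (5 bytes) fits entirely in the pending word, so data is empty.
const calldata = ["0x0", "0x68656c6c6f", "0x5"] as const;

const { out, offset } = parseByteArray(calldata, 0);
// out === "0x68656c6c6f" (the bytes of "hello"); offset === 3.

Full bytes31 chunks in data are zero-padded to 62 hex characters (31 bytes) before concatenation, and removeLeadingZeros trims the result back to a whole-byte boundary, returning "00" when every byte is zero.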
package/dist/parser.mjs.map
ADDED
@@ -0,0 +1 @@
+
{"version":3,"file":"parser.mjs","sources":["../src/parser.ts"],"sourcesContent":["/*\n * Calldata combinatorial parsers.\n *\n * Based on the Ekubo's event parser.\n *\n * MIT License\n *\n * Copyright (c) 2023 Ekubo, Inc.\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\nimport type { FieldElement } from \"./common\";\n\nexport type Parser<TOut> = (\n data: readonly FieldElement[],\n offset: number,\n) => { out: TOut; offset: number };\n\nexport class ParseError extends Error {\n constructor(message: string) {\n super(message);\n this.name = \"ParseError\";\n }\n}\n\n// Primitive types.\n\nfunction assertInBounds(data: readonly FieldElement[], offset: number) {\n if (offset >= data.length) {\n throw new ParseError(\n `Offset out of bounds. 
Data length ${data.length}, offset ${offset}`,\n );\n }\n}\n\nexport function parseBool(data: readonly FieldElement[], offset: number) {\n assertInBounds(data, offset);\n return { out: BigInt(data[offset]) > 0n, offset: offset + 1 };\n}\n\nexport function parseAsBigInt(data: readonly FieldElement[], offset: number) {\n assertInBounds(data, offset);\n return { out: BigInt(data[offset]), offset: offset + 1 };\n}\n\nexport const parseU8 = parseAsBigInt;\nexport const parseU16 = parseAsBigInt;\nexport const parseU32 = parseAsBigInt;\nexport const parseU64 = parseAsBigInt;\nexport const parseU128 = parseAsBigInt;\nexport const parseUsize = parseAsBigInt;\n\nexport function parseU256(data: readonly FieldElement[], offset: number) {\n assertInBounds(data, offset + 1);\n return {\n out: BigInt(data[offset]) + (BigInt(data[offset + 1]) << 128n),\n offset: offset + 2,\n };\n}\n\nexport function parseAsHex(data: readonly FieldElement[], offset: number) {\n assertInBounds(data, offset);\n return {\n out: data[offset],\n offset: offset + 1,\n };\n}\n\nexport const parseContractAddress = parseAsHex;\nexport const parseEthAddress = parseAsHex;\nexport const parseStorageAddress = parseAsHex;\nexport const parseClassHash = parseAsHex;\nexport const parseBytes31 = parseAsHex;\n\nexport function parseFelt252(data: readonly FieldElement[], offset: number) {\n assertInBounds(data, offset);\n return {\n out: BigInt(data[offset]),\n offset: offset + 1,\n };\n}\n\nexport function parseEmpty(_data: readonly FieldElement[], offset: number) {\n return { out: null, offset };\n}\n\n// Higher-level types.\n\nexport function parseArray<T>(type: Parser<T>): Parser<T[]> {\n return (data: readonly FieldElement[], startingOffset: number) => {\n let offset = startingOffset;\n const length = BigInt(data[offset]);\n\n offset++;\n\n const out: T[] = [];\n for (let i = 0; i < length; i++) {\n const { out: item, offset: newOffset } = type(data, offset);\n out.push(item);\n offset = newOffset;\n }\n\n return { out, offset };\n };\n}\n\nexport const parseSpan = parseArray;\n\nexport function parseOption<T>(type: Parser<T>) {\n return (data: readonly FieldElement[], offset: number) => {\n const hasValue = BigInt(data[offset]) === 1n;\n if (hasValue) {\n return type(data, offset + 1);\n }\n return { out: null, offset: offset + 1 };\n };\n}\n\nexport function parseStruct<T extends Record<string, unknown>>(\n parsers: {\n [K in keyof T]: { index: number; parser: Parser<T[K]> };\n },\n): Parser<{ [K in keyof T]: T[K] }> {\n const sortedParsers = Object.entries(parsers).sort(\n (a, b) => a[1].index - b[1].index,\n );\n const parser = (data: readonly FieldElement[], startingOffset: number) => {\n let offset = startingOffset;\n const out: Record<string, unknown> = {};\n for (const [key, { parser }] of sortedParsers) {\n const { out: value, offset: newOffset } = parser(data, offset);\n out[key] = value;\n offset = newOffset;\n }\n return { out, offset };\n };\n return parser as Parser<{ [K in keyof T]: T[K] }>;\n}\n\nexport function parseEnum<T extends Record<string, unknown>>(\n parsers: {\n [K in keyof T]: { index: number; parser: Parser<T[K]> };\n },\n): Parser<T[keyof T]> {\n return (data: readonly FieldElement[], startingOffset: number) => {\n const selectorFelt = data[startingOffset];\n const selector = Number(BigInt(selectorFelt));\n\n // Find the parser by index\n const parserEntry = Object.entries(parsers).find(\n ([, { index }]) => index === selector,\n );\n\n if (!parserEntry) {\n throw new ParseError(`Unknown enum variant 
selector: ${selector}`);\n }\n\n const [variantName, { parser }] = parserEntry;\n const { out, offset: newOffset } = parser(data, startingOffset + 1);\n\n return {\n out: { _tag: variantName, [variantName]: out } as T[keyof T],\n offset: newOffset,\n };\n };\n}\n\nexport function parseTuple<T extends Parser<unknown>[]>(\n ...parsers: T\n): Parser<UnwrapParsers<T>> {\n return (data: readonly FieldElement[], startingOffset: number) => {\n let offset = startingOffset;\n const out = [];\n for (const parser of parsers) {\n const { out: value, offset: newOffset } = parser(data, offset);\n out.push(value);\n offset = newOffset;\n }\n return { out, offset } as { out: UnwrapParsers<T>; offset: number };\n };\n}\n\ntype UnwrapParsers<TP> = {\n [Index in keyof TP]: TP[Index] extends Parser<infer U> ? U : never;\n};\n\nconst parseByteArrayStruct = parseStruct({\n data: {\n index: 0,\n parser: parseArray(parseBytes31),\n },\n pendingWord: { index: 1, parser: parseFelt252 },\n pendingWordLen: { index: 2, parser: parseU32 },\n});\n\nexport function parseByteArray(data: readonly FieldElement[], offset: number) {\n // A ByteArray is a struct with the following abi:\n //\n // {\n // name: \"core::byte_array::ByteArray\",\n // type: \"struct\",\n // members: [\n // {\n // name: \"data\",\n // type: \"core::array::Array::<core::bytes_31::bytes31>\",\n // },\n // {\n // name: \"pending_word\",\n // type: \"core::felt252\",\n // },\n // {\n // name: \"pending_word_len\",\n // type: \"core::integer::u32\",\n // },\n // ],\n // },\n //\n // We first parse it using a parser for that struct, then convert it to the output `0x${string}` type.\n const { out, offset: offsetOut } = parseByteArrayStruct(data, offset);\n\n // Remove 0x prefix from data elements and pad them to 31 bytes.\n const dataBytes = out.data\n .map((bytes) => bytes.slice(2).padStart(62, \"0\"))\n .join(\"\");\n\n let pending = out.pendingWord.toString(16);\n const pendingWordLength = Number(out.pendingWordLen);\n if (pending.length < pendingWordLength * 2) {\n pending = pending.padStart(pendingWordLength * 2, \"0\");\n }\n\n const pendingBytes = pending.slice(pending.length - 2 * pendingWordLength);\n const bytes = removeLeadingZeros(dataBytes + pendingBytes);\n\n return { out: `0x${bytes}`, offset: offsetOut };\n}\n\nfunction removeLeadingZeros(bytes: string): string {\n for (let i = 0; i < bytes.length; i++) {\n if (bytes[i] !== \"0\") {\n let j = i;\n if (i % 2 !== 0) {\n j -= 1;\n }\n return bytes.slice(j);\n }\n }\n // The bytes are all 0, so return something reasonable.\n return 
\"00\";\n}\n"],"names":["parser","bytes"],"mappings":"AAkCO,MAAM,mBAAmB,KAAM,CAAA;AAAA,EACpC,YAAY,OAAiB,EAAA;AAC3B,IAAA,KAAA,CAAM,OAAO,CAAA,CAAA;AACb,IAAA,IAAA,CAAK,IAAO,GAAA,YAAA,CAAA;AAAA,GACd;AACF,CAAA;AAIA,SAAS,cAAA,CAAe,MAA+B,MAAgB,EAAA;AACrE,EAAI,IAAA,MAAA,IAAU,KAAK,MAAQ,EAAA;AACzB,IAAA,MAAM,IAAI,UAAA;AAAA,MACR,CAAqC,kCAAA,EAAA,IAAA,CAAK,MAAM,CAAA,SAAA,EAAY,MAAM,CAAA,CAAA;AAAA,KACpE,CAAA;AAAA,GACF;AACF,CAAA;AAEgB,SAAA,SAAA,CAAU,MAA+B,MAAgB,EAAA;AACvE,EAAA,cAAA,CAAe,MAAM,MAAM,CAAA,CAAA;AAC3B,EAAO,OAAA,EAAE,GAAK,EAAA,MAAA,CAAO,IAAK,CAAA,MAAM,CAAC,CAAI,GAAA,EAAA,EAAI,MAAQ,EAAA,MAAA,GAAS,CAAE,EAAA,CAAA;AAC9D,CAAA;AAEgB,SAAA,aAAA,CAAc,MAA+B,MAAgB,EAAA;AAC3E,EAAA,cAAA,CAAe,MAAM,MAAM,CAAA,CAAA;AAC3B,EAAO,OAAA,EAAE,KAAK,MAAO,CAAA,IAAA,CAAK,MAAM,CAAC,CAAA,EAAG,MAAQ,EAAA,MAAA,GAAS,CAAE,EAAA,CAAA;AACzD,CAAA;AAEO,MAAM,OAAU,GAAA,cAAA;AAChB,MAAM,QAAW,GAAA,cAAA;AACjB,MAAM,QAAW,GAAA,cAAA;AACjB,MAAM,QAAW,GAAA,cAAA;AACjB,MAAM,SAAY,GAAA,cAAA;AAClB,MAAM,UAAa,GAAA,cAAA;AAEV,SAAA,SAAA,CAAU,MAA+B,MAAgB,EAAA;AACvE,EAAe,cAAA,CAAA,IAAA,EAAM,SAAS,CAAC,CAAA,CAAA;AAC/B,EAAO,OAAA;AAAA,IACL,GAAA,EAAK,MAAO,CAAA,IAAA,CAAK,MAAM,CAAC,CAAK,IAAA,MAAA,CAAO,IAAK,CAAA,MAAA,GAAS,CAAC,CAAC,CAAK,IAAA,IAAA,CAAA;AAAA,IACzD,QAAQ,MAAS,GAAA,CAAA;AAAA,GACnB,CAAA;AACF,CAAA;AAEgB,SAAA,UAAA,CAAW,MAA+B,MAAgB,EAAA;AACxE,EAAA,cAAA,CAAe,MAAM,MAAM,CAAA,CAAA;AAC3B,EAAO,OAAA;AAAA,IACL,GAAA,EAAK,KAAK,MAAM,CAAA;AAAA,IAChB,QAAQ,MAAS,GAAA,CAAA;AAAA,GACnB,CAAA;AACF,CAAA;AAEO,MAAM,oBAAuB,GAAA,WAAA;AAC7B,MAAM,eAAkB,GAAA,WAAA;AACxB,MAAM,mBAAsB,GAAA,WAAA;AAC5B,MAAM,cAAiB,GAAA,WAAA;AACvB,MAAM,YAAe,GAAA,WAAA;AAEZ,SAAA,YAAA,CAAa,MAA+B,MAAgB,EAAA;AAC1E,EAAA,cAAA,CAAe,MAAM,MAAM,CAAA,CAAA;AAC3B,EAAO,OAAA;AAAA,IACL,GAAK,EAAA,MAAA,CAAO,IAAK,CAAA,MAAM,CAAC,CAAA;AAAA,IACxB,QAAQ,MAAS,GAAA,CAAA;AAAA,GACnB,CAAA;AACF,CAAA;AAEgB,SAAA,UAAA,CAAW,OAAgC,MAAgB,EAAA;AACzE,EAAO,OAAA,EAAE,GAAK,EAAA,IAAA,EAAM,MAAO,EAAA,CAAA;AAC7B,CAAA;AAIO,SAAS,WAAc,IAA8B,EAAA;AAC1D,EAAO,OAAA,CAAC,MAA+B,cAA2B,KAAA;AAChE,IAAA,IAAI,MAAS,GAAA,cAAA,CAAA;AACb,IAAA,MAAM,MAAS,GAAA,MAAA,CAAO,IAAK,CAAA,MAAM,CAAC,CAAA,CAAA;AAElC,IAAA,MAAA,EAAA,CAAA;AAEA,IAAA,MAAM,MAAW,EAAC,CAAA;AAClB,IAAA,KAAA,IAAS,CAAI,GAAA,CAAA,EAAG,CAAI,GAAA,MAAA,EAAQ,CAAK,EAAA,EAAA;AAC/B,MAAM,MAAA,EAAE,KAAK,IAAM,EAAA,MAAA,EAAQ,WAAc,GAAA,IAAA,CAAK,MAAM,MAAM,CAAA,CAAA;AAC1D,MAAA,GAAA,CAAI,KAAK,IAAI,CAAA,CAAA;AACb,MAAS,MAAA,GAAA,SAAA,CAAA;AAAA,KACX;AAEA,IAAO,OAAA,EAAE,KAAK,MAAO,EAAA,CAAA;AAAA,GACvB,CAAA;AACF,CAAA;AAEO,MAAM,SAAY,GAAA,WAAA;AAElB,SAAS,YAAe,IAAiB,EAAA;AAC9C,EAAO,OAAA,CAAC,MAA+B,MAAmB,KAAA;AACxD,IAAA,MAAM,QAAW,GAAA,MAAA,CAAO,IAAK,CAAA,MAAM,CAAC,CAAM,KAAA,EAAA,CAAA;AAC1C,IAAA,IAAI,QAAU,EAAA;AACZ,MAAO,OAAA,IAAA,CAAK,IAAM,EAAA,MAAA,GAAS,CAAC,CAAA,CAAA;AAAA,KAC9B;AACA,IAAA,OAAO,EAAE,GAAA,EAAK,IAAM,EAAA,MAAA,EAAQ,SAAS,CAAE,EAAA,CAAA;AAAA,GACzC,CAAA;AACF,CAAA;AAEO,SAAS,YACd,OAGkC,EAAA;AAClC,EAAA,MAAM,aAAgB,GAAA,MAAA,CAAO,OAAQ,CAAA,OAAO,CAAE,CAAA,IAAA;AAAA,IAC5C,CAAC,GAAG,CAAM,KAAA,CAAA,CAAE,CAAC,CAAE,CAAA,KAAA,GAAQ,CAAE,CAAA,CAAC,CAAE,CAAA,KAAA;AAAA,GAC9B,CAAA;AACA,EAAM,MAAA,MAAA,GAAS,CAAC,IAAA,EAA+B,cAA2B,KAAA;AACxE,IAAA,IAAI,MAAS,GAAA,cAAA,CAAA;AACb,IAAA,MAAM,MAA+B,EAAC,CAAA;AACtC,IAAA,KAAA,MAAW,CAAC,GAAK,EAAA,EAAE,QAAAA,OAAO,EAAC,KAAK,aAAe,EAAA;AAC7C,MAAM,MAAA,EAAE,KAAK,KAAO,EAAA,MAAA,EAAQ,WAAcA,GAAAA,OAAAA,CAAO,MAAM,MAAM,CAAA,CAAA;AAC7D,MAAA,GAAA,CAAI,GAAG,CAAI,GAAA,KAAA,CAAA;AACX,MAAS,MAAA,GAAA,SAAA,CAAA;AAAA,KACX;AACA,IAAO,OAAA,EAAE,KAAK,MAAO,EAAA,CAAA;AAAA,GACvB,CAAA;AACA,EAAO,OAAA,MAAA,CAAA;AACT,CAAA;AAEO,SAAS,UACd,OAGoB,EAAA;AACpB,EAAO,OAAA,CAAC,MAA+B,cAA2B,KAAA;AAChE,IAAM,MAAA,YAAA,GAAe,KAAK,cAAc,CAAA,CAAA;AACxC,IAAA,
MAAM,QAAW,GAAA,MAAA,CAAO,MAAO,CAAA,YAAY,CAAC,CAAA,CAAA;AAG5C,IAAA,MAAM,WAAc,GAAA,MAAA,CAAO,OAAQ,CAAA,OAAO,CAAE,CAAA,IAAA;AAAA,MAC1C,CAAC,GAAG,EAAE,KAAM,EAAC,MAAM,KAAU,KAAA,QAAA;AAAA,KAC/B,CAAA;AAEA,IAAA,IAAI,CAAC,WAAa,EAAA;AAChB,MAAA,MAAM,IAAI,UAAA,CAAW,CAAkC,+BAAA,EAAA,QAAQ,CAAE,CAAA,CAAA,CAAA;AAAA,KACnE;AAEA,IAAA,MAAM,CAAC,WAAA,EAAa,EAAE,MAAA,EAAQ,CAAI,GAAA,WAAA,CAAA;AAClC,IAAM,MAAA,EAAE,KAAK,MAAQ,EAAA,SAAA,KAAc,MAAO,CAAA,IAAA,EAAM,iBAAiB,CAAC,CAAA,CAAA;AAElE,IAAO,OAAA;AAAA,MACL,KAAK,EAAE,IAAA,EAAM,aAAa,CAAC,WAAW,GAAG,GAAI,EAAA;AAAA,MAC7C,MAAQ,EAAA,SAAA;AAAA,KACV,CAAA;AAAA,GACF,CAAA;AACF,CAAA;AAEO,SAAS,cACX,OACuB,EAAA;AAC1B,EAAO,OAAA,CAAC,MAA+B,cAA2B,KAAA;AAChE,IAAA,IAAI,MAAS,GAAA,cAAA,CAAA;AACb,IAAA,MAAM,MAAM,EAAC,CAAA;AACb,IAAA,KAAA,MAAW,UAAU,OAAS,EAAA;AAC5B,MAAM,MAAA,EAAE,KAAK,KAAO,EAAA,MAAA,EAAQ,WAAc,GAAA,MAAA,CAAO,MAAM,MAAM,CAAA,CAAA;AAC7D,MAAA,GAAA,CAAI,KAAK,KAAK,CAAA,CAAA;AACd,MAAS,MAAA,GAAA,SAAA,CAAA;AAAA,KACX;AACA,IAAO,OAAA,EAAE,KAAK,MAAO,EAAA,CAAA;AAAA,GACvB,CAAA;AACF,CAAA;AAMA,MAAM,uBAAuB,WAAY,CAAA;AAAA,EACvC,IAAM,EAAA;AAAA,IACJ,KAAO,EAAA,CAAA;AAAA,IACP,MAAA,EAAQ,WAAW,YAAY,CAAA;AAAA,GACjC;AAAA,EACA,WAAa,EAAA,EAAE,KAAO,EAAA,CAAA,EAAG,QAAQ,YAAa,EAAA;AAAA,EAC9C,cAAgB,EAAA,EAAE,KAAO,EAAA,CAAA,EAAG,QAAQ,QAAS,EAAA;AAC/C,CAAC,CAAA,CAAA;AAEe,SAAA,cAAA,CAAe,MAA+B,MAAgB,EAAA;AAuB5E,EAAA,MAAM,EAAE,GAAK,EAAA,MAAA,EAAQ,WAAc,GAAA,oBAAA,CAAqB,MAAM,MAAM,CAAA,CAAA;AAGpE,EAAA,MAAM,YAAY,GAAI,CAAA,IAAA,CACnB,GAAI,CAAA,CAACC,WAAUA,MAAM,CAAA,KAAA,CAAM,CAAC,CAAA,CAAE,SAAS,EAAI,EAAA,GAAG,CAAC,CAAA,CAC/C,KAAK,EAAE,CAAA,CAAA;AAEV,EAAA,IAAI,OAAU,GAAA,GAAA,CAAI,WAAY,CAAA,QAAA,CAAS,EAAE,CAAA,CAAA;AACzC,EAAM,MAAA,iBAAA,GAAoB,MAAO,CAAA,GAAA,CAAI,cAAc,CAAA,CAAA;AACnD,EAAI,IAAA,OAAA,CAAQ,MAAS,GAAA,iBAAA,GAAoB,CAAG,EAAA;AAC1C,IAAA,OAAA,GAAU,OAAQ,CAAA,QAAA,CAAS,iBAAoB,GAAA,CAAA,EAAG,GAAG,CAAA,CAAA;AAAA,GACvD;AAEA,EAAA,MAAM,eAAe,OAAQ,CAAA,KAAA,CAAM,OAAQ,CAAA,MAAA,GAAS,IAAI,iBAAiB,CAAA,CAAA;AACzE,EAAM,MAAA,KAAA,GAAQ,kBAAmB,CAAA,SAAA,GAAY,YAAY,CAAA,CAAA;AAEzD,EAAA,OAAO,EAAE,GAAK,EAAA,CAAA,EAAA,EAAK,KAAK,CAAA,CAAA,EAAI,QAAQ,SAAU,EAAA,CAAA;AAChD,CAAA;AAEA,SAAS,mBAAmB,KAAuB,EAAA;AACjD,EAAA,KAAA,IAAS,CAAI,GAAA,CAAA,EAAG,CAAI,GAAA,KAAA,CAAM,QAAQ,CAAK,EAAA,EAAA;AACrC,IAAI,IAAA,KAAA,CAAM,CAAC,CAAA,KAAM,GAAK,EAAA;AACpB,MAAA,IAAI,CAAI,GAAA,CAAA,CAAA;AACR,MAAI,IAAA,CAAA,GAAI,MAAM,CAAG,EAAA;AACf,QAAK,CAAA,IAAA,CAAA,CAAA;AAAA,OACP;AACA,MAAO,OAAA,KAAA,CAAM,MAAM,CAAC,CAAA,CAAA;AAAA,KACtB;AAAA,GACF;AAEA,EAAO,OAAA,IAAA,CAAA;AACT;;;;"}
package/dist/shared/starknet.e649ecb1.d.cts
ADDED
@@ -0,0 +1,40 @@
+import { Codec, CodecType } from '@apibara/protocol/codec';
+import _m0 from 'protobufjs/minimal.js';
+
+declare const protobufPackage = "starknet.v2";
+/** A field element. */
+interface FieldElement$1 {
+    readonly x0?: bigint | undefined;
+    readonly x1?: bigint | undefined;
+    readonly x2?: bigint | undefined;
+    readonly x3?: bigint | undefined;
+}
+declare const FieldElement$1: {
+    encode(message: FieldElement$1, writer?: _m0.Writer): _m0.Writer;
+    decode(input: _m0.Reader | Uint8Array, length?: number): FieldElement$1;
+    fromJSON(object: any): FieldElement$1;
+    toJSON(message: FieldElement$1): unknown;
+    create(base?: DeepPartial<FieldElement$1>): FieldElement$1;
+    fromPartial(object: DeepPartial<FieldElement$1>): FieldElement$1;
+};
+type Builtin = Date | Function | Uint8Array | string | number | boolean | bigint | undefined;
+type DeepPartial<T> = T extends Builtin ? T : T extends globalThis.Array<infer U> ? globalThis.Array<DeepPartial<U>> : T extends ReadonlyArray<infer U> ? ReadonlyArray<DeepPartial<U>> : T extends {
+    readonly $case: string;
+} ? {
+    [K in keyof Omit<T, "$case">]?: DeepPartial<T[K]>;
+} & {
+    readonly $case: T["$case"];
+} : T extends {} ? {
+    [K in keyof T]?: DeepPartial<T[K]>;
+} : Partial<T>;
+
+type common_DeepPartial<T> = DeepPartial<T>;
+declare const common_protobufPackage: typeof protobufPackage;
+declare namespace common {
+    export { type common_DeepPartial as DeepPartial, FieldElement$1 as FieldElement, common_protobufPackage as protobufPackage };
+}
+
+declare const FieldElement: Codec<`0x${string}`, FieldElement$1>;
+type FieldElement = CodecType<typeof FieldElement>;
+
+export { FieldElement$1 as F, FieldElement as a, common as c };
package/dist/shared/starknet.e649ecb1.d.mts
ADDED
@@ -0,0 +1,40 @@
+import { Codec, CodecType } from '@apibara/protocol/codec';
+import _m0 from 'protobufjs/minimal.js';
+
+declare const protobufPackage = "starknet.v2";
+/** A field element. */
+interface FieldElement$1 {
+    readonly x0?: bigint | undefined;
+    readonly x1?: bigint | undefined;
+    readonly x2?: bigint | undefined;
+    readonly x3?: bigint | undefined;
+}
+declare const FieldElement$1: {
+    encode(message: FieldElement$1, writer?: _m0.Writer): _m0.Writer;
+    decode(input: _m0.Reader | Uint8Array, length?: number): FieldElement$1;
+    fromJSON(object: any): FieldElement$1;
+    toJSON(message: FieldElement$1): unknown;
+    create(base?: DeepPartial<FieldElement$1>): FieldElement$1;
+    fromPartial(object: DeepPartial<FieldElement$1>): FieldElement$1;
+};
+type Builtin = Date | Function | Uint8Array | string | number | boolean | bigint | undefined;
+type DeepPartial<T> = T extends Builtin ? T : T extends globalThis.Array<infer U> ? globalThis.Array<DeepPartial<U>> : T extends ReadonlyArray<infer U> ? ReadonlyArray<DeepPartial<U>> : T extends {
+    readonly $case: string;
+} ? {
+    [K in keyof Omit<T, "$case">]?: DeepPartial<T[K]>;
+} & {
+    readonly $case: T["$case"];
+} : T extends {} ? {
+    [K in keyof T]?: DeepPartial<T[K]>;
+} : Partial<T>;
+
+type common_DeepPartial<T> = DeepPartial<T>;
+declare const common_protobufPackage: typeof protobufPackage;
+declare namespace common {
+    export { type common_DeepPartial as DeepPartial, FieldElement$1 as FieldElement, common_protobufPackage as protobufPackage };
+}
+
+declare const FieldElement: Codec<`0x${string}`, FieldElement$1>;
+type FieldElement = CodecType<typeof FieldElement>;
+
+export { FieldElement$1 as F, FieldElement as a, common as c };
package/dist/shared/starknet.e649ecb1.d.ts
ADDED
@@ -0,0 +1,40 @@
+import { Codec, CodecType } from '@apibara/protocol/codec';
+import _m0 from 'protobufjs/minimal.js';
+
+declare const protobufPackage = "starknet.v2";
+/** A field element. */
+interface FieldElement$1 {
+    readonly x0?: bigint | undefined;
+    readonly x1?: bigint | undefined;
+    readonly x2?: bigint | undefined;
+    readonly x3?: bigint | undefined;
+}
+declare const FieldElement$1: {
+    encode(message: FieldElement$1, writer?: _m0.Writer): _m0.Writer;
+    decode(input: _m0.Reader | Uint8Array, length?: number): FieldElement$1;
+    fromJSON(object: any): FieldElement$1;
+    toJSON(message: FieldElement$1): unknown;
+    create(base?: DeepPartial<FieldElement$1>): FieldElement$1;
+    fromPartial(object: DeepPartial<FieldElement$1>): FieldElement$1;
+};
+type Builtin = Date | Function | Uint8Array | string | number | boolean | bigint | undefined;
+type DeepPartial<T> = T extends Builtin ? T : T extends globalThis.Array<infer U> ? globalThis.Array<DeepPartial<U>> : T extends ReadonlyArray<infer U> ? ReadonlyArray<DeepPartial<U>> : T extends {
+    readonly $case: string;
+} ? {
+    [K in keyof Omit<T, "$case">]?: DeepPartial<T[K]>;
+} & {
+    readonly $case: T["$case"];
+} : T extends {} ? {
+    [K in keyof T]?: DeepPartial<T[K]>;
+} : Partial<T>;
+
+type common_DeepPartial<T> = DeepPartial<T>;
+declare const common_protobufPackage: typeof protobufPackage;
+declare namespace common {
+    export { type common_DeepPartial as DeepPartial, FieldElement$1 as FieldElement, common_protobufPackage as protobufPackage };
+}
+
+declare const FieldElement: Codec<`0x${string}`, FieldElement$1>;
+type FieldElement = CodecType<typeof FieldElement>;
+
+export { FieldElement$1 as F, FieldElement as a, common as c };
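The three added declaration files are identical apart from their extension: each describes a ts-proto style FieldElement message (four optional bigint limbs, x0..x3) plus a Codec that maps it to a plain 0x-prefixed hex string, and that hex-string type is the FieldElement the parser module consumes. A type-level sketch of that relationship in TypeScript; the names below are local to the example, not package exports.

// Wire shape, as declared above: a felt split into four optional bigint limbs.
interface FieldElementMessage {
  readonly x0?: bigint;
  readonly x1?: bigint;
  readonly x2?: bigint;
  readonly x3?: bigint;
}

// Decoded shape used by dist/parser.*: just a hex string.
type FieldElementHex = `0x${string}`;

// The declared Codec<`0x${string}`, FieldElement$1> bridges the two, so
// downstream calldata is handled as plain hex strings:
const calldata: readonly FieldElementHex[] = ["0x1", "0x68656c6c6f"];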
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@apibara/starknet",
-  "version": "2.1.0-beta.
+  "version": "2.1.0-beta.40",
   "type": "module",
   "files": [
     "dist",
@@ -27,8 +27,8 @@
     "build": "pnpm build:proto && unbuild",
     "build:proto": "buf generate proto",
     "typecheck": "tsc --noEmit",
-    "test": "vitest",
-    "test:ci": "vitest run",
+    "test": "vitest --typecheck",
+    "test:ci": "vitest run --typecheck",
     "lint": "biome check .",
     "lint:fix": "pnpm lint --write",
     "format": "biome format . --write"
@@ -42,11 +42,9 @@
     "vitest": "^1.6.0"
   },
   "dependencies": {
-    "@apibara/protocol": "2.1.0-beta.
-    "@effect/schema": "^0.67.15",
+    "@apibara/protocol": "2.1.0-beta.40",
     "@scure/starknet": "^1.1.0",
     "abi-wan-kanabi": "^2.2.4",
-    "effect": "^3.2.6",
     "long": "^5.2.1",
     "nice-grpc-common": "^2.0.2",
     "protobufjs": "^7.1.2"