envio 2.12.0 → 2.12.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +6 -6
- package/src/Enum.res +1 -1
- package/src/Internal.res +2 -0
- package/src/ReorgDetection.res +10 -4
- package/src/Utils.res +8 -8
- package/src/bindings/BigInt.res +1 -11
- package/src/bindings/Ethers.res +0 -9
- package/src/bindings/Postgres.res +4 -1
- package/src/db/EntityHistory.res +4 -2
- package/src/db/Table.res +113 -9
- package/src/sources/HyperSyncJsonApi.res +3 -3
- package/src/vendored/Rest.res +90 -24
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "envio",
-  "version": "v2.12.0",
+  "version": "v2.12.1",
   "description": "A latency and sync speed optimized, developer friendly blockchain data indexer.",
   "bin": "./bin.js",
   "repository": {
@@ -23,15 +23,15 @@
   },
   "homepage": "https://envio.dev",
   "optionalDependencies": {
-    "envio-linux-x64": "v2.12.0",
-    "envio-linux-arm64": "v2.12.0",
-    "envio-darwin-x64": "v2.12.0",
-    "envio-darwin-arm64": "v2.12.0"
+    "envio-linux-x64": "v2.12.1",
+    "envio-linux-arm64": "v2.12.1",
+    "envio-darwin-x64": "v2.12.1",
+    "envio-darwin-arm64": "v2.12.1"
   },
   "dependencies": {
     "@envio-dev/hypersync-client": "0.6.3",
     "rescript": "11.1.3",
-    "rescript-schema": "
+    "rescript-schema": "9.1.0",
     "viem": "2.21.0"
   },
   "files": [
package/src/Enum.res
CHANGED
@@ -9,7 +9,7 @@ type enum<'a> = {
 let make = (~name, ~variants) => {
   name,
   variants,
-  schema:
+  schema: S.enum(variants),
   default: switch variants->Belt.Array.get(0) {
   | Some(v) => v
   | None => Js.Exn.raiseError("No variants defined for enum " ++ name)
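
Note: with this change the enum's schema field is built directly with rescript-schema's S.enum. A minimal sketch of the resulting behaviour, using a hypothetical variant type that is not part of the package:

type status = Active | Inactive

// `make` is the constructor shown in the diff above: `schema` is now built
// directly from the variants array, and `default` falls back to the first variant.
let statusEnum = make(~name="Status", ~variants=[Active, Inactive])
// statusEnum.schema is S.enum([Active, Inactive]); statusEnum.default == Active
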
package/src/Internal.res
CHANGED
package/src/ReorgDetection.res
CHANGED
@@ -72,7 +72,7 @@ module LastBlockScannedHashes: {
     int,
   >

-  let
+  let getThresholdBlockNumbers: (t, ~currentBlockHeight: int) => array<int>

   let hasReorgOccurred: (t, ~reorgGuard: reorgGuard) => bool

@@ -399,10 +399,16 @@ module LastBlockScannedHashes: {
     }
   }

-  let
-
-
+  let getThresholdBlockNumbers = (self: t, ~currentBlockHeight) => {
+    let blockNumbers = []
+    let thresholdBlocknumber = currentBlockHeight - self.confirmedBlockThreshold
+    self.lastBlockScannedDataList->Belt.List.forEach(v => {
+      if v.blockNumber >= thresholdBlocknumber {
+        blockNumbers->Belt.Array.push(v.blockNumber)
+      }
     })
+    blockNumbers
+  }

   /**
   Checks whether reorg has occured by comparing the parent hash with the last saved block hash.
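
Note: the new getThresholdBlockNumbers collects the block numbers of scanned blocks that are still within confirmedBlockThreshold of the chain head, i.e. blocks that could still be affected by a reorg. A rough usage sketch, where lastBlockScannedHashes is an assumed existing value of type LastBlockScannedHashes.t:

// Returns every scanned block number >= currentBlockHeight - confirmedBlockThreshold.
let unconfirmedBlockNumbers =
  lastBlockScannedHashes->LastBlockScannedHashes.getThresholdBlockNumbers(
    ~currentBlockHeight=20_000_000,
  )
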
package/src/Utils.res
CHANGED
@@ -266,14 +266,7 @@ let unwrapResultExn = res =>
 external queueMicrotask: (unit => unit) => unit = "queueMicrotask"

 module Schema = {
-  let enum =
-
-  // A hot fix after we use the version where it's supported
-  // https://github.com/DZakh/rescript-schema/blob/v8.4.0/docs/rescript-usage.md#removetypevalidation
-  let removeTypeValidationInPlace = schema => {
-    // The variables input is guaranteed to be an object, so we reset the rescript-schema type filter here
-    (schema->Obj.magic)["f"] = ()
-  }
+  let enum = S.enum

   let getNonOptionalFieldNames = schema => {
     let acc = []
@@ -300,6 +293,13 @@ module Schema = {
     }
   }

+  // Don't use S.unknown, since it's not serializable to json
+  // In a nutshell, this is completely unsafe.
+  let dbDate =
+    S.json(~validate=false)
+    ->(magic: S.t<Js.Json.t> => S.t<Js.Date.t>)
+    ->S.preprocess(_ => {serializer: date => date->magic->Js.Date.toISOString})
+
   // When trying to serialize data to Json pg type, it will fail with
   // PostgresError: column "params" is of type json but expression is of type boolean
   // If there's bool or null on the root level. It works fine as object field values.
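
Note: the new dbDate schema appears intended for serializing Js.Date.t values to JSON for the database; its preprocess serializer is date => date->Js.Date.toISOString. A rough sketch of the intended use, assuming the helper is reached as Utils.Schema.dbDate (the exact interplay with S.preprocess is not spelled out in the diff):

let createdAt = Js.Date.fromString("2024-01-01T00:00:00.000Z")

// The serializer above converts the date with Js.Date.toISOString, so the
// reverse conversion should produce the ISO-8601 string as JSON.
let json = createdAt->S.reverseConvertToJsonOrThrow(Utils.Schema.dbDate)
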
package/src/bindings/BigInt.res
CHANGED
@@ -56,14 +56,4 @@ let schema =
     serializer: bigint => bigint->toString,
   })

-let nativeSchema
-  {
-    parser: unknown => {
-      if Js.typeof(unknown) !== "bigint" {
-        s.fail("Expected bigint")
-      } else {
-        unknown->Obj.magic
-      }
-    },
-  }
-})
+let nativeSchema = S.bigint
package/src/bindings/Ethers.res
CHANGED
@@ -185,15 +185,6 @@ module JsonRpcProvider = {
     fields->Obj.magic
   }

-  type listenerEvent = [#block]
-  @send external onEventListener: (t, listenerEvent, int => unit) => unit = "on"
-
-  @send external offAllEventListeners: (t, listenerEvent) => unit = "off"
-
-  let onBlock = (t, callback: int => unit) => t->onEventListener(#block, callback)
-
-  let removeOnBlockEventListener = t => t->offAllEventListeners(#block)
-
   @send
   external getBlockNumber: t => promise<int> = "getBlockNumber"

package/src/bindings/Postgres.res
CHANGED
@@ -52,7 +52,7 @@ type sslOptions =
   | @as("prefer") Prefer
   | @as("verify-full") VerifyFull

-let sslOptionsSchema: S.schema<sslOptions> =
+let sslOptionsSchema: S.schema<sslOptions> = S.enum([
   Bool(true),
   Bool(false),
   Require,
@@ -96,3 +96,6 @@ external makeSql: (~config: poolConfig) => sql = "postgres"
 // external sql: array<string> => (sql, array<string>) => int = "sql"

 @send external unsafe: (sql, string) => promise<'a> = "unsafe"
+@send
+external preparedUnsafe: (sql, string, unknown, @as(json`{prepare: true}`) _) => promise<'a> =
+  "unsafe"
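
Note: the new preparedUnsafe binding calls the same postgres.js unsafe method, but the @as annotation appends a constant {prepare: true} options argument so the query runs as a prepared statement. A hypothetical usage sketch (the query text and parameter are made up; the binding is assumed to be reachable as Postgres.preparedUnsafe):

// Compiles to sql.unsafe(query, params, {prepare: true}) on the JS side.
let getChainMetadata = (sql: Postgres.sql, ~chainId: int) =>
  sql->Postgres.preparedUnsafe(
    `SELECT * FROM public.chain_metadata WHERE chain_id = $1`,
    [chainId]->Obj.magic,
  )
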
package/src/db/EntityHistory.res
CHANGED
@@ -144,7 +144,7 @@ let insertRow = (
   ~historyRow: historyRow<'entity>,
   ~shouldCopyCurrentEntity,
 ) => {
-  let row = historyRow->S.
+  let row = historyRow->S.reverseConvertToJsonOrThrow(self.schema)
   self.insertFn(sql, row, ~shouldCopyCurrentEntity)
 }
@@ -155,7 +155,9 @@ let batchInsertRows = (
   ~shouldCopyCurrentEntity,
 ) => {
   let rows =
-    rows
+    rows
+    ->S.reverseConvertToJsonOrThrow(self.schemaRows)
+    ->(Utils.magic: Js.Json.t => array<Js.Json.t>)
   rows
   ->Belt.Array.map(row => {
     self.insertFn(sql, row, ~shouldCopyCurrentEntity)
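
Note: both call sites above move to rescript-schema 9's reverseConvertToJsonOrThrow, which serializes a typed value back to Js.Json.t using the given schema and throws if the value does not match. A minimal standalone sketch with a hypothetical schema (not the entity-history schema itself):

// Schema for objects shaped like {"id": "0x01"}, where the ReScript-side value
// is just the id string.
let rowSchema = S.object(s => s.field("id", S.string))

// Yields the JSON {"id": "0x01"}; a mismatched value would raise instead.
let json: Js.Json.t = "0x01"->S.reverseConvertToJsonOrThrow(rowSchema)
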
package/src/db/Table.res
CHANGED
@@ -80,6 +80,10 @@ let getFieldName = fieldOrDerived =>
   | DerivedFrom({fieldName}) => fieldName
   }

+let getFieldType = (field: field) => {
+  (field.fieldType :> string) ++ (field.isArray ? "[]" : "")
+}
+
 type table = {
   tableName: string,
   fields: array<fieldOrDerived>,
@@ -112,6 +116,10 @@ let getFields = table =>
     }
   )

+let getFieldNames = table => {
+  table->getFields->Array.map(getDbFieldName)
+}
+
 let getNonDefaultFields = table =>
   table.fields->Array.keepMap(field =>
     switch field {
@@ -138,16 +146,20 @@ let getDerivedFromFields = table =>
     }
   )

-let getFieldNames = table => {
-  table->getFields->Array.map(getDbFieldName)
-}
-
 let getNonDefaultFieldNames = table => {
   table->getNonDefaultFields->Array.map(getDbFieldName)
 }

-let getFieldByName = (table,
-  table.fields->Js.Array2.find(field => field->getUserDefinedFieldName
+let getFieldByName = (table, fieldName) =>
+  table.fields->Js.Array2.find(field => field->getUserDefinedFieldName === fieldName)
+
+let getFieldByDbName = (table, dbFieldName) =>
+  table.fields->Js.Array2.find(field =>
+    switch field {
+    | Field(f) => f->getDbFieldName
+    | DerivedFrom({fieldName}) => fieldName
+    } === dbFieldName
+  )

 exception NonExistingTableField(string)

@@ -166,6 +178,92 @@ let getUnfilteredCompositeIndicesUnsafe = (table): array<array<string>> => {
   )
 }

+type sqlParams<'entity> = {
+  dbSchema: S.t<'entity>,
+  quotedFieldNames: array<string>,
+  quotedNonPrimaryFieldNames: array<string>,
+  arrayFieldTypes: array<string>,
+  hasArrayField: bool,
+}
+
+let toSqlParams = (table: table, ~schema) => {
+  let quotedFieldNames = []
+  let quotedNonPrimaryFieldNames = []
+  let arrayFieldTypes = []
+  let hasArrayField = ref(false)
+
+  let dbSchema: S.t<Js.Dict.t<unknown>> = S.schema(s =>
+    switch schema->S.classify {
+    | Object({items}) =>
+      let dict = Js.Dict.empty()
+      items->Belt.Array.forEach(({location, inlinedLocation, schema}) => {
+        let rec coerceSchema = schema =>
+          switch schema->S.classify {
+          | BigInt => BigInt.schema->S.toUnknown
+          | Option(child)
+          | Null(child) =>
+            S.null(child->coerceSchema)->S.toUnknown
+          | Array(child) => {
+              hasArrayField := true
+              S.array(child->coerceSchema)->S.toUnknown
+            }
+          | JSON(_) => {
+              hasArrayField := true
+              schema
+            }
+          | Bool =>
+            // Workaround for https://github.com/porsager/postgres/issues/471
+            S.union([
+              S.literal("t")->S.to(_ => true),
+              S.literal("f")->S.to(_ => false),
+            ])->S.toUnknown
+          | _ => schema
+          }
+
+        let field = switch table->getFieldByDbName(location) {
+        | Some(field) => field
+        | None => raise(NonExistingTableField(location))
+        }
+
+        quotedFieldNames
+        ->Js.Array2.push(inlinedLocation)
+        ->ignore
+        switch field {
+        | Field({isPrimaryKey: false}) =>
+          quotedNonPrimaryFieldNames
+          ->Js.Array2.push(inlinedLocation)
+          ->ignore
+        | _ => ()
+        }
+
+        arrayFieldTypes
+        ->Js.Array2.push(
+          switch field {
+          | Field(f) =>
+            switch f.fieldType {
+            | Custom(fieldType) => `${(Text :> string)}[]::${(fieldType :> string)}`
+            | fieldType => (fieldType :> string)
+            }
+          | DerivedFrom(_) => (Text :> string)
+          } ++ "[]",
+        )
+        ->ignore
+        dict->Js.Dict.set(location, s.matches(schema->coerceSchema))
+      })
+      dict
+    | _ => Js.Exn.raiseError("Failed creating db schema. Expected an object schema for table")
+    }
+  )
+
+  {
+    dbSchema: dbSchema->(Utils.magic: S.t<dict<unknown>> => S.t<'entity>),
+    quotedFieldNames,
+    quotedNonPrimaryFieldNames,
+    arrayFieldTypes,
+    hasArrayField: hasArrayField.contents,
+  }
+}
+
 /*
 Gets all single indicies
 And maps the fields defined to their actual db name (some have _id suffix)
@@ -240,11 +338,17 @@ module PostgresInterop = {
     Promise.all(responses)
   }

-  let makeBatchSetFn = (~table, ~
+  let makeBatchSetFn = (~table, ~schema: S.t<'a>): batchSetFn<'a> => {
     let batchSetFn: pgFn<array<Js.Json.t>, unit> = table->makeBatchSetFnString->eval
+    let parseOrThrow = S.compile(
+      S.array(schema),
+      ~input=Value,
+      ~output=Json,
+      ~mode=Sync,
+      ~typeValidation=true,
+    )
     async (sql, rows) => {
-      let rowsJson =
-        rows->S.serializeOrRaiseWith(rowsSchema)->(Utils.magic: Js.Json.t => array<Js.Json.t>)
+      let rowsJson = rows->parseOrThrow->(Utils.magic: Js.Json.t => array<Js.Json.t>)
       let _res = await chunkBatchQuery(sql, rowsJson, batchSetFn)
     }
   }
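
Note: one detail of toSqlParams above is the boolean branch, a workaround for https://github.com/porsager/postgres/issues/471 where booleans inside Postgres array parameters surface as "t"/"f" strings. The same coercion, pulled out into a standalone sketch:

// Maps the Postgres text representation of booleans back to ReScript bools.
let pgBool = S.union([
  S.literal("t")->S.to(_ => true),
  S.literal("f")->S.to(_ => false),
])

let removed = "t"->S.parseOrThrow(pgBool) // true
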
package/src/sources/HyperSyncJsonApi.res
CHANGED
@@ -22,7 +22,7 @@ module QueryTypes = {
   | @as("uncles") Uncles
   | @as("base_fee_per_gas") BaseFeePerGas

-  let blockFieldOptionsSchema =
+  let blockFieldOptionsSchema = S.enum([
     Number,
     Hash,
     ParentHash,
@@ -76,7 +76,7 @@ module QueryTypes = {
   | @as("status") Status
   | @as("sighash") Sighash

-  let transactionFieldOptionsSchema =
+  let transactionFieldOptionsSchema = S.enum([
     BlockHash,
     BlockNumber,
     From,
@@ -123,7 +123,7 @@ module QueryTypes = {
   | @as("topic2") Topic2
   | @as("topic3") Topic3

-  let logFieldOptionsSchema =
+  let logFieldOptionsSchema = S.enum([
     Removed,
     LogIndex,
     TransactionIndex,
package/src/vendored/Rest.res
CHANGED
@@ -34,6 +34,7 @@ module Promise = {

 module Option = {
   let unsafeSome: 'a => option<'a> = Obj.magic
+  let unsafeUnwrap: option<'a> => 'a = Obj.magic
 }

 module Dict = {
@@ -328,6 +329,28 @@ let coerceSchema = schema => {
   })
 }

+let stripInPlace = schema => (schema->S.classify->Obj.magic)["unknownKeys"] = S.Strip
+let getSchemaField = (schema, fieldName): option<S.item> =>
+  (schema->S.classify->Obj.magic)["fields"]->Js.Dict.unsafeGet(fieldName)
+
+type typeValidation = (unknown, ~inputVar: string) => string
+let removeTypeValidationInPlace = schema => (schema->Obj.magic)["f"] = ()
+let setTypeValidationInPlace = (schema, typeValidation: typeValidation) =>
+  (schema->Obj.magic)["f"] = typeValidation
+let unsafeGetTypeValidationInPlace = (schema): typeValidation => (schema->Obj.magic)["f"]
+
+let isNestedFlattenSupported = schema =>
+  switch schema->S.classify {
+  | Object({advanced: false}) =>
+    switch schema
+    ->S.reverse
+    ->S.classify {
+    | Object({advanced: false}) => true
+    | _ => false
+    }
+  | _ => false
+  }
+
 let bearerAuthSchema = S.string->S.transform(s => {
   serializer: token => {
     `Bearer ${token}`
@@ -372,10 +395,14 @@ let params = route => {
   let variablesSchema = S.object(s => {
     routeDefinition.variables({
       field: (fieldName, schema) => {
-        s.
+        s.nested("body").field(fieldName, schema)
       },
       body: schema => {
-
+        if schema->isNestedFlattenSupported {
+          s.nested("body").flatten(schema)
+        } else {
+          s.field("body", schema)
+        }
       },
       rawBody: schema => {
         let isNonStringBased = switch schema->S.classify {
@@ -390,20 +417,19 @@ let params = route => {
         s.field("body", schema)
       },
       header: (fieldName, schema) => {
-        s.
+        s.nested("headers").field(fieldName->Js.String2.toLowerCase, coerceSchema(schema))
       },
       query: (fieldName, schema) => {
-        s.
+        s.nested("query").field(fieldName, coerceSchema(schema))
       },
       param: (fieldName, schema) => {
         if !Dict.has(pathParams, fieldName) {
           panic(`Path parameter "${fieldName}" is not defined in the path`)
         }
-        s.
+        s.nested("params").field(fieldName, coerceSchema(schema))
       },
       auth: auth => {
-        s.
-          "headers",
+        s.nested("headers").field(
           "authorization",
           switch auth {
           | Bearer => bearerAuthSchema
@@ -416,14 +442,22 @@ let params = route => {

   {
     // The variables input is guaranteed to be an object, so we reset the rescript-schema type filter here
-
-
-
-
-
-
-
-    }
+    variablesSchema->stripInPlace
+    variablesSchema->removeTypeValidationInPlace
+    switch variablesSchema->getSchemaField("headers") {
+    | Some({schema}) =>
+      schema->stripInPlace
+      schema->removeTypeValidationInPlace
+    | None => ()
+    }
+    switch variablesSchema->getSchemaField("params") {
+    | Some({schema}) => schema->removeTypeValidationInPlace
+    | None => ()
+    }
+    switch variablesSchema->getSchemaField("query") {
+    | Some({schema}) => schema->removeTypeValidationInPlace
+    | None => ()
+    }
   }

   let responsesMap = Js.Dict.empty()
@@ -443,14 +477,18 @@ let params = route => {
       description: d => builder.description = Some(d),
       field: (fieldName, schema) => {
         builder.emptyData = false
-        s.
+        s.nested("data").field(fieldName, schema)
       },
       data: schema => {
         builder.emptyData = false
-
+        if schema->isNestedFlattenSupported {
+          s.nested("data").flatten(schema)
+        } else {
+          s.field("data", schema)
+        }
       },
       header: (fieldName, schema) => {
-        s.
+        s.nested("headers").field(fieldName->Js.String2.toLowerCase, coerceSchema(schema))
       },
     })
     if builder.emptyData {
@@ -461,8 +499,25 @@ let params = route => {
     if builder.status === None {
       responsesMap->Response.register(#default, builder)
     }
-
-
+    schema->stripInPlace
+    schema->removeTypeValidationInPlace
+    let dataSchema = (schema->getSchemaField("data")->Option.unsafeUnwrap).schema
+    builder.dataSchema = dataSchema->Option.unsafeSome
+    switch dataSchema->S.classify {
+    | Literal(_) => {
+        let dataTypeValidation = dataSchema->unsafeGetTypeValidationInPlace
+        schema->setTypeValidationInPlace((b, ~inputVar) =>
+          dataTypeValidation(b, ~inputVar=`${inputVar}.data`)
+        )
+      }
+    | _ => ()
+    }
+    switch schema->getSchemaField("headers") {
+    | Some({schema}) =>
+      schema->stripInPlace
+      schema->removeTypeValidationInPlace
+    | None => ()
+    }
     builder.schema = Option.unsafeSome(schema)
     responses
     ->Js.Array2.push(builder->(Obj.magic: Response.builder<unknown> => Response.t<unknown>))
@@ -606,7 +661,7 @@ let fetch = (

   let {definition, variablesSchema, responsesMap, pathItems, isRawBody} = route->params

-  let data = variables->S.
+  let data = variables->S.reverseConvertOrThrow(variablesSchema)->Obj.magic

   if data["body"] !== %raw(`void 0`) {
     if !isRawBody {
@@ -643,9 +698,20 @@ let fetch = (

       panic(error.contents)
     | Some(response) =>
-      fetcherResponse
-      ->S.
-      ->(Obj.magic: unknown => response)
+      try fetcherResponse
+      ->S.parseOrThrow(response.schema)
+      ->(Obj.magic: unknown => response) catch {
+      | S.Raised({path, code: InvalidType({expected, received})}) if path === S.Path.empty =>
+        panic(
+          `Failed parsing response data. Reason: Expected ${(
+              expected->getSchemaField("data")->Option.unsafeUnwrap
+            ).schema->S.name}, received ${(received->Obj.magic)["data"]->Obj.magic}`,
+        )
+      | S.Raised(error) =>
+        panic(
+          `Failed parsing response at ${error.path->S.Path.toString}. Reason: ${error->S.Error.reason}`,
+        )
+      }
     }
   })
 }