@f3liz/rescript-autogen-openapi 0.2.0 → 0.3.0

This diff shows the changes between publicly released versions of this package, as they appear in their respective public registries. It is provided for informational purposes only.

Potentially problematic release.


This version of @f3liz/rescript-autogen-openapi has been flagged as potentially problematic; see the registry's advisory listing for details.

@@ -118,7 +118,7 @@ function generateNamedType(namedSchema) {
118
118
  if (typeof match !== "object" || match.TAG !== "Object") {
119
119
  exit = 1;
120
120
  } else {
121
- declaration = `export interface ` + namedSchema.name + ` ` + typeCode;
121
+ declaration = typeCode === "Record<string, never>" ? `export type ` + namedSchema.name + ` = ` + typeCode + `;` : `export interface ` + namedSchema.name + ` ` + typeCode;
122
122
  }
123
123
  if (exit === 1) {
124
124
  declaration = `export type ` + namedSchema.name + ` = ` + typeCode + `;`;
@@ -31,7 +31,7 @@ function generateSchemaCodeForDict(schemaDict) {
31
31
  name: name + `Schema`,
32
32
  description: schema.description,
33
33
  type_: ir
34
- }, undefined, undefined, undefined);
34
+ }, undefined, undefined, undefined, match$1[2]);
35
35
  return [
36
36
  CodegenUtils.indent(match$1[0], 2),
37
37
  CodegenUtils.indent(match$2[0], 2),
@@ -20,7 +20,7 @@ function generateTypeCodeAndSchemaCode(name, schema) {
20
20
  name: name + `Schema`,
21
21
  description: schema.description,
22
22
  type_: ir
23
- }, undefined, undefined, undefined);
23
+ }, undefined, undefined, undefined, match$1[2]);
24
24
  return match$1[0] + `\n\n` + match$2[0];
25
25
  }
26
26
 
@@ -1,15 +1,19 @@
1
1
  // Generated by ReScript, PLEASE EDIT WITH CARE
2
2
 
3
+ import * as SchemaIR from "../core/SchemaIR.mjs";
3
4
 
4
- function make(path, insideComponentSchemasOpt, availableSchemas, modulePrefixOpt, param) {
5
+ function make(path, insideComponentSchemasOpt, availableSchemas, modulePrefixOpt, selfRefName, param) {
5
6
  let insideComponentSchemas = insideComponentSchemasOpt !== undefined ? insideComponentSchemasOpt : false;
6
7
  let modulePrefix = modulePrefixOpt !== undefined ? modulePrefixOpt : "";
7
8
  return {
8
9
  warnings: [],
10
+ extractedTypes: [],
11
+ extractCounter: 0,
9
12
  path: path,
10
13
  insideComponentSchemas: insideComponentSchemas,
11
14
  availableSchemas: availableSchemas,
12
- modulePrefix: modulePrefix
15
+ modulePrefix: modulePrefix,
16
+ selfRefName: selfRefName
13
17
  };
14
18
  }
15
19
 
@@ -17,8 +21,27 @@ function addWarning(ctx, warning) {
17
21
  ctx.warnings.push(warning);
18
22
  }
19
23
 
24
+ function extractType(ctx, baseName, isUnboxedOpt, irType) {
25
+ let isUnboxed = isUnboxedOpt !== undefined ? isUnboxedOpt : false;
26
+ let existing = ctx.extractedTypes.find(param => SchemaIR.equals(param.irType, irType));
27
+ if (existing !== undefined) {
28
+ return existing.typeName;
29
+ }
30
+ ctx.extractCounter = ctx.extractCounter + 1 | 0;
31
+ let first = baseName.charAt(0);
32
+ let lowerBaseName = first === "" ? "extracted" : first.toLowerCase() + baseName.slice(1);
33
+ let typeName = lowerBaseName + `_` + ctx.extractCounter.toString();
34
+ ctx.extractedTypes.push({
35
+ typeName: typeName,
36
+ irType: irType,
37
+ isUnboxed: isUnboxed
38
+ });
39
+ return typeName;
40
+ }
41
+
20
42
  export {
21
43
  make,
22
44
  addWarning,
45
+ extractType,
23
46
  }
24
47
  /* No side effect */
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@f3liz/rescript-autogen-openapi",
3
- "version": "0.2.0",
3
+ "version": "0.3.0",
4
4
  "description": "Generate ReScript code with Sury schemas from OpenAPI 3.1 specs. Supports multiple forks with diff/merge capabilities.",
5
5
  "keywords": [
6
6
  "rescript",
@@ -38,7 +38,8 @@
38
38
  "dependencies": {
39
39
  "@readme/openapi-parser": "^5.5.0",
40
40
  "js-convert-case": "^4.2.0",
41
- "pathe": "^2.0.3"
41
+ "pathe": "^2.0.3",
42
+ "toposort": "^2.0.2"
42
43
  },
43
44
  "devDependencies": {
44
45
  "rescript": "^12.1.0",
@@ -0,0 +1,16 @@
1
+ // SPDX-License-Identifier: MPL-2.0
2
+
3
+ // Toposort.res - ReScript bindings for the toposort npm package
4
+
5
+ type toposortModule
6
+
7
+ @module("toposort") external toposortModule: toposortModule = "default"
8
+
9
+ // toposort.array(nodes, edges) — sort nodes topologically given directed edges
10
+ // edges: array of [from, to] meaning "from depends on to" (to must come before from)
11
+ // Returns sorted array (dependencies first)
12
+ // Throws on cycles
13
+ @send
14
+ external array: (toposortModule, array<string>, array<(string, string)>) => array<string> = "array"
15
+
16
+ let sortArray = (nodes, edges) => toposortModule->array(nodes, edges)
@@ -128,3 +128,53 @@ let generateDocString = (~summary=?, ~description=?, ()): string => {
128
128
 
129
129
  // Shared type signature for the fetch function used in generated code
130
130
  let fetchTypeSignature = "(~url: string, ~method_: string, ~body: option<JSON.t>) => Promise.t<JSON.t>"
131
+
132
+ // Generate variant constructor name from an IR type
133
+ let rec variantConstructorName = (irType: SchemaIR.irType): string => {
134
+ switch irType {
135
+ | String(_) => "String"
136
+ | Number(_) => "Float"
137
+ | Integer(_) => "Int"
138
+ | Boolean => "Bool"
139
+ | Null => "Null"
140
+ | Array(_) => "Array"
141
+ | Object(_) => "Object"
142
+ | Reference(ref) =>
143
+ let name = if ref->String.includes("/") {
144
+ ref->String.split("/")->Array.get(ref->String.split("/")->Array.length - 1)->Option.getOr("Ref")
145
+ } else {
146
+ ref
147
+ }
148
+ toPascalCase(name)
149
+ | Literal(StringLiteral(s)) => toPascalCase(s)
150
+ | Literal(NumberLiteral(_)) => "Number"
151
+ | Literal(BooleanLiteral(_)) => "Bool"
152
+ | Literal(NullLiteral) => "Null"
153
+ | Option(inner) => variantConstructorName(inner)
154
+ | Intersection(_) => "Intersection"
155
+ | Union(_) => "Union"
156
+ | Unknown => "Unknown"
157
+ }
158
+ }
159
+
160
+ // Deduplicate variant constructor names by appending counter suffix
161
+ let deduplicateNames = (names: array<string>): array<string> => {
162
+ let counts: Dict.t<int> = Dict.make()
163
+ let result: array<string> = []
164
+ names->Array.forEach(name => {
165
+ let count = counts->Dict.get(name)->Option.getOr(0)
166
+ counts->Dict.set(name, count + 1)
167
+ })
168
+ let seen: Dict.t<int> = Dict.make()
169
+ names->Array.forEach(name => {
170
+ let total = counts->Dict.get(name)->Option.getOr(1)
171
+ if total > 1 {
172
+ let idx = seen->Dict.get(name)->Option.getOr(0) + 1
173
+ seen->Dict.set(name, idx)
174
+ result->Array.push(`${name}${Int.toString(idx)}`)
175
+ } else {
176
+ result->Array.push(name)
177
+ }
178
+ })
179
+ result
180
+ }
@@ -205,6 +205,39 @@ let rec equals = (a: irType, b: irType): bool => {
205
205
  | (Boolean, Boolean) => true
206
206
  | (Null, Null) => true
207
207
  | (Array({items: itemsA, _}), Array({items: itemsB, _})) => equals(itemsA, itemsB)
208
+ | (Object({properties: propsA, additionalProperties: addA}), Object({properties: propsB, additionalProperties: addB})) =>
209
+ Array.length(propsA) == Array.length(propsB) &&
210
+ propsA->Array.everyWithIndex((propA, i) => {
211
+ switch propsB->Array.get(i) {
212
+ | Some(propB) => {
213
+ let (nameA, typeA, reqA) = propA
214
+ let (nameB, typeB, reqB) = propB
215
+ nameA == nameB && reqA == reqB && equals(typeA, typeB)
216
+ }
217
+ | None => false
218
+ }
219
+ }) &&
220
+ switch (addA, addB) {
221
+ | (Some(a), Some(b)) => equals(a, b)
222
+ | (None, None) => true
223
+ | _ => false
224
+ }
225
+ | (Union(typesA), Union(typesB)) =>
226
+ Array.length(typesA) == Array.length(typesB) &&
227
+ typesA->Array.everyWithIndex((tA, i) =>
228
+ switch typesB->Array.get(i) {
229
+ | Some(tB) => equals(tA, tB)
230
+ | None => false
231
+ }
232
+ )
233
+ | (Intersection(typesA), Intersection(typesB)) =>
234
+ Array.length(typesA) == Array.length(typesB) &&
235
+ typesA->Array.everyWithIndex((tA, i) =>
236
+ switch typesB->Array.get(i) {
237
+ | Some(tB) => equals(tA, tB)
238
+ | None => false
239
+ }
240
+ )
208
241
  | (Reference(refA), Reference(refB)) => refA == refB
209
242
  | (Option(innerA), Option(innerB)) => equals(innerA, innerB)
210
243
  | (Literal(litA), Literal(litB)) => {
@@ -5,6 +5,7 @@
5
5
  // Helper to convert raw JSON type string to our variant
6
6
  // This is needed because Obj.magic from JSON gives us raw strings like "string", "object", etc.
7
7
  // but our variant constructors compile to "String", "Object", etc. in JS
8
+ // Also handles OpenAPI 3.1 array form: type: ["string", "null"]
8
9
  let parseTypeString = (rawType: Types.jsonSchemaType): Types.jsonSchemaType => {
9
10
  // The rawType might actually be a raw string from JSON, so we need to handle that
10
11
  // We use Obj.magic to get the underlying JS value and check it
@@ -34,6 +35,21 @@ let parseTypeString = (rawType: Types.jsonSchemaType): Types.jsonSchemaType => {
34
35
  }
35
36
  }
36
37
 
38
+ // Check if the type field is an array (OpenAPI 3.1: type: ["string", "null"])
39
+ // Returns Some(array of parsed types) if array, None otherwise
40
+ let parseTypeAsArray = (rawType: Types.jsonSchemaType): option<array<Types.jsonSchemaType>> => {
41
+ let raw: 'a = Obj.magic(rawType)
42
+ if Array.isArray(raw) {
43
+ let arr: array<string> = Obj.magic(raw)
44
+ Some(arr->Array.map(s => {
45
+ let t: Types.jsonSchemaType = Obj.magic(s)
46
+ parseTypeString(t)
47
+ }))
48
+ } else {
49
+ None
50
+ }
51
+ }
52
+
37
53
  // Parsing context to collect warnings
38
54
  type parsingContext = {
39
55
  mutable warnings: array<Types.warning>,
@@ -68,6 +84,44 @@ let rec parseJsonSchemaWithContext = (
68
84
  // Check if nullable
69
85
  let isNullable = schema.nullable->Option.getOr(false)
70
86
 
87
+ // Handle OpenAPI 3.1 array type: type: ["string", "null"]
88
+ // But skip if anyOf/oneOf is present — those are more specific
89
+ let hasComposition = schema.anyOf->Option.isSome || schema.oneOf->Option.isSome
90
+ let typeAsArray = if hasComposition {
91
+ None
92
+ } else {
93
+ schema.type_->Option.flatMap(parseTypeAsArray)
94
+ }
95
+ // When composition is preferred, check if the type_ is actually an array
96
+ // and clear it so the None branch runs the composition handlers
97
+ let schema = if hasComposition && schema.type_->Option.flatMap(parseTypeAsArray)->Option.isSome {
98
+ {...schema, type_: None}
99
+ } else {
100
+ schema
101
+ }
102
+
103
+ switch typeAsArray {
104
+ | Some(types) if Array.length(types) > 1 => {
105
+ // Array type form — convert to Union of parsed types
106
+ let irTypes = types->Array.map(t => {
107
+ let subSchema = {...schema, type_: Some(t), nullable: None}
108
+ parseJsonSchemaWithContext(~ctx, ~depth=depth + 1, subSchema)
109
+ })
110
+ let baseType = SchemaIR.Union(irTypes)
111
+ if isNullable {
112
+ SchemaIR.Option(baseType)
113
+ } else {
114
+ baseType
115
+ }
116
+ }
117
+ | Some(types) if Array.length(types) == 1 => {
118
+ // Single-element array: treat as that type
119
+ let subSchema = {...schema, type_: Some(types->Array.getUnsafe(0)), nullable: None}
120
+ let baseType = parseJsonSchemaWithContext(~ctx, ~depth=depth + 1, subSchema)
121
+ if isNullable { SchemaIR.Option(baseType) } else { baseType }
122
+ }
123
+ | _ => {
124
+
71
125
  // Normalize the type field (raw JSON strings like "string" -> variant String)
72
126
  let normalizedType = schema.type_->Option.map(parseTypeString)
73
127
 
@@ -164,8 +218,8 @@ let rec parseJsonSchemaWithContext = (
164
218
  SchemaIR.Union(literals)
165
219
  }
166
220
  | (_, Some(_), _, _, _) => {
167
- // Has properties, treat as object
168
- parseJsonSchemaWithContext(~ctx, ~depth=depth + 1, {...schema, type_: Some(Object)})
221
+ // Has properties, treat as object (clear nullable to avoid double wrapping)
222
+ parseJsonSchemaWithContext(~ctx, ~depth=depth + 1, {...schema, type_: Some(Object), nullable: None})
169
223
  }
170
224
  | (_, _, Some(schemas), _, _) => {
171
225
  // allOf - intersection
@@ -193,6 +247,8 @@ let rec parseJsonSchemaWithContext = (
193
247
  } else {
194
248
  baseType
195
249
  }
250
+ } // end | _ => (typeAsArray arm)
251
+ } // end switch typeAsArray
196
252
  }
197
253
  }
198
254
  }
@@ -216,6 +272,37 @@ let parseNamedSchema = (~name: string, ~schema: Types.jsonSchema): (SchemaIR.nam
216
272
  }, ctx.warnings)
217
273
  }
218
274
 
275
+ // Normalize $ref paths in IR: "#/components/schemas/Name" → "Name" (when Name exists in available schemas)
276
+ let rec normalizeReferences = (~availableNames: array<string>, irType: SchemaIR.irType): SchemaIR.irType => {
277
+ switch irType {
278
+ | SchemaIR.Reference(ref) => {
279
+ let parts = ref->String.split("/")
280
+ let name = parts->Array.get(parts->Array.length - 1)->Option.getOr("")
281
+ if availableNames->Array.includes(name) {
282
+ SchemaIR.Reference(name)
283
+ } else {
284
+ irType
285
+ }
286
+ }
287
+ | SchemaIR.Array({items, constraints}) =>
288
+ SchemaIR.Array({items: normalizeReferences(~availableNames, items), constraints})
289
+ | SchemaIR.Object({properties, additionalProperties}) => {
290
+ let newProperties = properties->Array.map(((n, t, r)) =>
291
+ (n, normalizeReferences(~availableNames, t), r)
292
+ )
293
+ let newAdditional = additionalProperties->Option.map(t => normalizeReferences(~availableNames, t))
294
+ SchemaIR.Object({properties: newProperties, additionalProperties: newAdditional})
295
+ }
296
+ | SchemaIR.Union(types) =>
297
+ SchemaIR.Union(types->Array.map(t => normalizeReferences(~availableNames, t)))
298
+ | SchemaIR.Intersection(types) =>
299
+ SchemaIR.Intersection(types->Array.map(t => normalizeReferences(~availableNames, t)))
300
+ | SchemaIR.Option(inner) =>
301
+ SchemaIR.Option(normalizeReferences(~availableNames, inner))
302
+ | other => other
303
+ }
304
+ }
305
+
219
306
  // Parse all component schemas
220
307
  let parseComponentSchemas = (schemas: dict<Types.jsonSchema>): (SchemaIR.schemaContext, array<Types.warning>) => {
221
308
  let namedSchemas = Dict.make()
@@ -227,6 +314,13 @@ let parseComponentSchemas = (schemas: dict<Types.jsonSchema>): (SchemaIR.schemaC
227
314
  allWarnings->Array.pushMany(warnings)
228
315
  })
229
316
 
317
+ // Resolve $ref paths in the IR: normalize "#/components/schemas/Name" to just "Name"
318
+ let availableNames = Dict.keysToArray(namedSchemas)
319
+ namedSchemas->Dict.toArray->Array.forEach(((name, namedSchema)) => {
320
+ let resolved = normalizeReferences(~availableNames, namedSchema.type_)
321
+ Dict.set(namedSchemas, name, {...namedSchema, type_: resolved})
322
+ })
323
+
230
324
  ({schemas: namedSchemas}, allWarnings)
231
325
  }
232
326
 
@@ -6,8 +6,8 @@ open Types
6
6
  let rec extractReferencedSchemaNames = (irType: SchemaIR.irType) =>
7
7
  switch irType {
8
8
  | Reference(ref) =>
9
- let parts = ref->String.split("/")
10
- [parts->Array.get(parts->Array.length - 1)->Option.getOr("")]
9
+ // After normalization, ref is just the schema name (no path prefix)
10
+ [ref]
11
11
  | Array({items}) => extractReferencedSchemaNames(items)
12
12
  | Object({properties}) => properties->Array.flatMap(((_name, fieldType, _)) => extractReferencedSchemaNames(fieldType))
13
13
  | Union(types)
@@ -31,88 +31,171 @@ let generate = (~spec, ~outputDir) => {
31
31
  let schemas = Dict.valuesToArray(context.schemas)
32
32
  let schemaNameMap = Dict.fromArray(schemas->Array.map(s => (s.name, s)))
33
33
 
34
- // Map each schema to its internal dependencies (other schemas in the same spec)
35
- let dependencyMap = schemas->Array.reduce(Dict.make(), (acc, schema) => {
34
+ // Build dependency edges for topological sort
35
+ // Edge (A, B) means "A depends on B" so B must come before A
36
+ let allNodes = schemas->Array.map(s => s.name)
37
+ let edges = schemas->Array.flatMap(schema => {
36
38
  let references =
37
39
  extractReferencedSchemaNames(schema.type_)->Array.filter(name =>
38
40
  Dict.has(schemaNameMap, name) && name != schema.name
39
41
  )
40
- Dict.set(acc, schema.name, references)
41
- acc
42
+ references->Array.map(dep => (schema.name, dep))
42
43
  })
43
44
 
44
- // Topological sort (Kahn's algorithm) to handle schema dependencies
45
- let sortedSchemas = []
46
- let inDegreeMap = schemas->Array.reduce(Dict.make(), (acc, schema) => {
47
- let degree = Dict.get(dependencyMap, schema.name)->Option.mapOr(0, Array.length)
48
- Dict.set(acc, schema.name, degree)
49
- acc
50
- })
51
-
52
- let queue = schemas->Array.filter(schema => Dict.get(inDegreeMap, schema.name)->Option.getOr(0) == 0)
53
-
54
- while Array.length(queue) > 0 {
55
- let schema = switch Array.shift(queue) {
56
- | Some(v) => v
57
- | None => schemas->Array.getUnsafe(0) // Should not happen
58
- }
59
- sortedSchemas->Array.push(schema)
60
-
61
- schemas->Array.forEach(otherSchema => {
62
- let dependsOnCurrent =
63
- Dict.get(dependencyMap, otherSchema.name)
64
- ->Option.getOr([])
65
- ->Array.some(name => name == schema.name)
66
-
67
- if dependsOnCurrent {
68
- let currentDegree = Dict.get(inDegreeMap, otherSchema.name)->Option.getOr(0)
69
- let newDegree = currentDegree - 1
70
- Dict.set(inDegreeMap, otherSchema.name, newDegree)
71
- if newDegree == 0 {
72
- queue->Array.push(otherSchema)
73
- }
45
+ // Use toposort with cycle tolerance: if there's a cycle, catch and fall back
46
+ // Note: toposort returns dependents first, dependencies last.
47
+ // We reverse to get execution order (dependencies first).
48
+ let sortedNames = try {
49
+ Toposort.sortArray(allNodes, edges)->Array.toReversed
50
+ } catch {
51
+ | _ =>
52
+ // Cycles exist — remove back-edges and re-sort
53
+ let visited = Dict.make()
54
+ let inStack = Dict.make()
55
+ let cycleEdges: array<(string, string)> = []
56
+
57
+ let rec dfs = (node) => {
58
+ if Dict.get(inStack, node)->Option.getOr(false) {
59
+ ()
60
+ } else if Dict.get(visited, node)->Option.getOr(false) {
61
+ ()
62
+ } else {
63
+ Dict.set(visited, node, true)
64
+ Dict.set(inStack, node, true)
65
+ edges->Array.forEach(((from, to)) => {
66
+ if from == node {
67
+ if Dict.get(inStack, to)->Option.getOr(false) {
68
+ cycleEdges->Array.push((from, to))
69
+ } else {
70
+ dfs(to)
71
+ }
72
+ }
73
+ })
74
+ Dict.set(inStack, node, false)
74
75
  }
75
- })
76
+ }
77
+ allNodes->Array.forEach(dfs)
78
+
79
+ let nonCycleEdges = edges->Array.filter(((from, to)) =>
80
+ !(cycleEdges->Array.some(((cf, ct)) => cf == from && ct == to))
81
+ )
82
+ try {
83
+ Toposort.sortArray(allNodes, nonCycleEdges)->Array.toReversed
84
+ } catch {
85
+ | _ => allNodes->Array.toSorted((a, b) => String.compare(a, b))
86
+ }
76
87
  }
77
88
 
78
- // Ensure all schemas are included even if there's a circular dependency
79
- let sortedNames = sortedSchemas->Array.map(s => s.name)
80
- let remainingSchemas =
81
- schemas
82
- ->Array.filter(s => !(sortedNames->Array.some(name => name == s.name)))
83
- ->Array.toSorted((a, b) => String.compare(a.name, b.name))
84
-
85
- let finalSortedSchemas = Array.concat(sortedSchemas, remainingSchemas)
89
+ let finalSortedSchemas = sortedNames->Array.filterMap(name => Dict.get(schemaNameMap, name))
86
90
  let availableSchemaNames = finalSortedSchemas->Array.map(s => s.name)
87
91
  let warnings = Array.copy(parseWarnings)
88
92
 
93
+ // Detect self-referencing schemas (schema references itself directly or indirectly through properties)
94
+ let selfRefSchemas = Dict.make()
95
+ finalSortedSchemas->Array.forEach(schema => {
96
+ let refs = extractReferencedSchemaNames(schema.type_)
97
+ if refs->Array.some(name => name == schema.name) {
98
+ Dict.set(selfRefSchemas, schema.name, true)
99
+ }
100
+ })
101
+
89
102
  let moduleCodes = finalSortedSchemas->Array.map(schema => {
103
+ let isSelfRef = Dict.get(selfRefSchemas, schema.name)->Option.getOr(false)
104
+ let selfRefName = isSelfRef ? Some(schema.name) : None
105
+
90
106
  let typeCtx = GenerationContext.make(
91
107
  ~path=`ComponentSchemas.${schema.name}`,
92
108
  ~insideComponentSchemas=true,
93
109
  ~availableSchemas=availableSchemaNames,
110
+ ~selfRefName?,
94
111
  (),
95
112
  )
113
+
114
+ let typeCode = IRToTypeGenerator.generateTypeWithContext(~ctx=typeCtx, ~depth=0, schema.type_)
115
+
116
+ // Iteratively resolve nested extractions using typeCtx
117
+ let processed = ref(0)
118
+ while processed.contents < Array.length(typeCtx.extractedTypes) {
119
+ let idx = processed.contents
120
+ let {irType, isUnboxed, _}: GenerationContext.extractedType = typeCtx.extractedTypes->Array.getUnsafe(idx)
121
+ if !isUnboxed {
122
+ ignore(IRToTypeGenerator.generateTypeWithContext(~ctx=typeCtx, ~depth=0, ~inline=false, irType))
123
+ } else {
124
+ switch irType {
125
+ | Union(types) =>
126
+ types->Array.forEach(memberType => {
127
+ ignore(IRToTypeGenerator.generateTypeWithContext(~ctx=typeCtx, ~depth=0, ~inline=true, memberType))
128
+ })
129
+ | _ => ignore(IRToTypeGenerator.generateTypeWithContext(~ctx=typeCtx, ~depth=0, ~inline=false, irType))
130
+ }
131
+ }
132
+ processed := idx + 1
133
+ }
134
+
135
+ let allExtracted = Array.copy(typeCtx.extractedTypes)->Array.toReversed
136
+ let extractedTypeMap = if Array.length(allExtracted) > 0 { Some(allExtracted) } else { None }
137
+
138
+ // Generate schema with extracted type map for correct references
96
139
  let schemaCtx = GenerationContext.make(
97
140
  ~path=`ComponentSchemas.${schema.name}`,
98
141
  ~insideComponentSchemas=true,
99
142
  ~availableSchemas=availableSchemaNames,
143
+ ~selfRefName?,
100
144
  (),
101
145
  )
102
-
103
- let typeCode = IRToTypeGenerator.generateTypeWithContext(~ctx=typeCtx, ~depth=0, schema.type_)
104
- let schemaCode = IRToSuryGenerator.generateSchemaWithContext(~ctx=schemaCtx, ~depth=0, schema.type_)
146
+ let schemaCode = IRToSuryGenerator.generateSchemaWithContext(~ctx=schemaCtx, ~depth=0, ~extractedTypeMap?, schema.type_)
105
147
 
106
148
  warnings->Array.pushMany(typeCtx.warnings)
107
149
  warnings->Array.pushMany(schemaCtx.warnings)
108
150
 
151
+ // Generate extracted auxiliary types and schemas (use ctx for dedup)
152
+ let extractedTypeDefs = allExtracted->Array.map(({typeName, irType, isUnboxed}: GenerationContext.extractedType) => {
153
+ let auxTypeCode = if isUnboxed {
154
+ switch irType {
155
+ | Union(types) =>
156
+ let body = IRToTypeGenerator.generateUnboxedVariantBody(~ctx=typeCtx, types)
157
+ `@unboxed type ${typeName} = ${body}`
158
+ | _ =>
159
+ let auxType = IRToTypeGenerator.generateTypeWithContext(~ctx=typeCtx, ~depth=0, irType)
160
+ `type ${typeName} = ${auxType}`
161
+ }
162
+ } else {
163
+ let auxType = IRToTypeGenerator.generateTypeWithContext(~ctx=typeCtx, ~depth=0, irType)
164
+ `type ${typeName} = ${auxType}`
165
+ }
166
+ let auxSchemaCtx = GenerationContext.make(
167
+ ~path=`ComponentSchemas.${schema.name}.${typeName}`,
168
+ ~insideComponentSchemas=true,
169
+ ~availableSchemas=availableSchemaNames,
170
+ (),
171
+ )
172
+ // Exclude the current type from the map to avoid self-reference
173
+ let filteredMap = allExtracted->Array.filter(({typeName: tn}: GenerationContext.extractedType) => tn != typeName)
174
+ let auxExtractedTypeMap = if Array.length(filteredMap) > 0 { Some(filteredMap) } else { None }
175
+ let auxSchema = IRToSuryGenerator.generateSchemaWithContext(~ctx=auxSchemaCtx, ~depth=0, ~extractedTypeMap=?auxExtractedTypeMap, irType)
176
+ ` ${auxTypeCode}\n let ${typeName}Schema = ${auxSchema}`
177
+ })
178
+
109
179
  let docComment = schema.description->Option.mapOr("", d =>
110
180
  CodegenUtils.generateDocString(~description=d, ())
111
181
  )
112
182
 
183
+ let extractedBlock = if Array.length(extractedTypeDefs) > 0 {
184
+ extractedTypeDefs->Array.join("\n") ++ "\n"
185
+ } else {
186
+ ""
187
+ }
188
+
189
+ // Use `type rec t` for self-referential types
190
+ let typeKeyword = isSelfRef ? "type rec t" : "type t"
191
+ // Wrap schema in S.recursive for self-referential types
192
+ let finalSchemaCode = isSelfRef
193
+ ? `S.recursive("${schema.name}", schema => ${schemaCode})`
194
+ : schemaCode
195
+
113
196
  `${docComment}module ${CodegenUtils.toPascalCase(schema.name)} = {
114
- type t = ${typeCode}
115
- let schema = ${schemaCode}
197
+ ${extractedBlock} ${typeKeyword} = ${typeCode}
198
+ let schema = ${finalSchemaCode}
116
199
  }`
117
200
  })
118
201
 
@@ -10,13 +10,14 @@ let getJsonSchemaFromRequestBody = (requestBody: option<requestBody>) =>
10
10
 
11
11
  let generateTypeCodeAndSchemaCode = (~jsonSchema, ~typeName, ~schemaName, ~modulePrefix="") => {
12
12
  let (ir, _) = SchemaIRParser.parseJsonSchema(jsonSchema)
13
- let (typeCode, _) = IRToTypeGenerator.generateNamedType(
13
+ let (typeCode, _, extractedTypes) = IRToTypeGenerator.generateNamedType(
14
14
  ~namedSchema={name: typeName, description: jsonSchema.description, type_: ir},
15
15
  ~modulePrefix,
16
16
  )
17
17
  let (schemaCode, _) = IRToSuryGenerator.generateNamedSchema(
18
18
  ~namedSchema={name: schemaName, description: jsonSchema.description, type_: ir},
19
19
  ~modulePrefix,
20
+ ~extractedTypes,
20
21
  )
21
22
  (typeCode, schemaCode)
22
23
  }
@@ -34,7 +35,10 @@ let generateEndpointFunction = (endpoint: endpoint, ~overrideDir=?, ~moduleName=
34
35
 
35
36
  let bodyParam = hasRequestBody
36
37
  ? (isRequestBodyRequired ? `~body: ${requestTypeName}` : `~body: option<${requestTypeName}>=?`)
37
- : "~body as _"
38
+ : ""
39
+
40
+ // Clean up function signature: handle comma between body and fetch params
41
+ let paramSep = hasRequestBody ? ", " : ""
38
42
 
39
43
  let bodyValueConversion = hasRequestBody
40
44
  ? (
@@ -83,7 +87,7 @@ let generateEndpointFunction = (endpoint: endpoint, ~overrideDir=?, ~moduleName=
83
87
 
84
88
  let code = `
85
89
  |${docComment->String.trimEnd}
86
- |let ${functionName} = (${bodyParam}, ~fetch: ${CodegenUtils.fetchTypeSignature}): promise<${functionName}Response> => {
90
+ |let ${functionName} = (${bodyParam}${paramSep}~fetch: ${CodegenUtils.fetchTypeSignature}): promise<${functionName}Response> => {
87
91
  |${bodyValueConversion}
88
92
  | fetch(
89
93
  | ~url="${endpoint.path}",