@f3liz/rescript-autogen-openapi 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +339 -0
- package/README.md +98 -0
- package/lib/es6/src/Codegen.mjs +423 -0
- package/lib/es6/src/Types.mjs +20 -0
- package/lib/es6/src/core/CodegenUtils.mjs +186 -0
- package/lib/es6/src/core/DocOverride.mjs +399 -0
- package/lib/es6/src/core/FileSystem.mjs +78 -0
- package/lib/es6/src/core/IRBuilder.mjs +201 -0
- package/lib/es6/src/core/OpenAPIParser.mjs +168 -0
- package/lib/es6/src/core/Pipeline.mjs +150 -0
- package/lib/es6/src/core/ReferenceResolver.mjs +41 -0
- package/lib/es6/src/core/Result.mjs +378 -0
- package/lib/es6/src/core/SchemaIR.mjs +355 -0
- package/lib/es6/src/core/SchemaIRParser.mjs +490 -0
- package/lib/es6/src/core/SchemaRefResolver.mjs +146 -0
- package/lib/es6/src/core/SchemaRegistry.mjs +92 -0
- package/lib/es6/src/core/SpecDiffer.mjs +251 -0
- package/lib/es6/src/core/SpecMerger.mjs +237 -0
- package/lib/es6/src/generators/ComponentSchemaGenerator.mjs +125 -0
- package/lib/es6/src/generators/DiffReportGenerator.mjs +155 -0
- package/lib/es6/src/generators/EndpointGenerator.mjs +172 -0
- package/lib/es6/src/generators/IRToSuryGenerator.mjs +233 -0
- package/lib/es6/src/generators/IRToTypeGenerator.mjs +241 -0
- package/lib/es6/src/generators/IRToTypeScriptGenerator.mjs +143 -0
- package/lib/es6/src/generators/ModuleGenerator.mjs +285 -0
- package/lib/es6/src/generators/SchemaCodeGenerator.mjs +77 -0
- package/lib/es6/src/generators/ThinWrapperGenerator.mjs +97 -0
- package/lib/es6/src/generators/TypeScriptDtsGenerator.mjs +172 -0
- package/lib/es6/src/generators/TypeScriptWrapperGenerator.mjs +145 -0
- package/lib/es6/src/types/CodegenError.mjs +79 -0
- package/lib/es6/src/types/Config.mjs +42 -0
- package/lib/es6/src/types/GenerationContext.mjs +24 -0
- package/package.json +44 -0
- package/rescript.json +20 -0
- package/src/Codegen.res +222 -0
- package/src/Types.res +195 -0
- package/src/core/CodegenUtils.res +130 -0
- package/src/core/DocOverride.res +504 -0
- package/src/core/FileSystem.res +62 -0
- package/src/core/IRBuilder.res +66 -0
- package/src/core/OpenAPIParser.res +144 -0
- package/src/core/Pipeline.res +51 -0
- package/src/core/ReferenceResolver.res +41 -0
- package/src/core/Result.res +187 -0
- package/src/core/SchemaIR.res +258 -0
- package/src/core/SchemaIRParser.res +360 -0
- package/src/core/SchemaRefResolver.res +143 -0
- package/src/core/SchemaRegistry.res +107 -0
- package/src/core/SpecDiffer.res +270 -0
- package/src/core/SpecMerger.res +245 -0
- package/src/generators/ComponentSchemaGenerator.res +127 -0
- package/src/generators/DiffReportGenerator.res +152 -0
- package/src/generators/EndpointGenerator.res +172 -0
- package/src/generators/IRToSuryGenerator.res +199 -0
- package/src/generators/IRToTypeGenerator.res +199 -0
- package/src/generators/IRToTypeScriptGenerator.res +72 -0
- package/src/generators/ModuleGenerator.res +362 -0
- package/src/generators/SchemaCodeGenerator.res +83 -0
- package/src/generators/ThinWrapperGenerator.res +124 -0
- package/src/generators/TypeScriptDtsGenerator.res +193 -0
- package/src/generators/TypeScriptWrapperGenerator.res +166 -0
- package/src/types/CodegenError.res +82 -0
- package/src/types/Config.res +89 -0
- package/src/types/GenerationContext.res +23 -0
|
@@ -0,0 +1,270 @@
|
|
|
1
|
+
// SPDX-License-Identifier: MPL-2.0
|
|
2
|
+
|
|
3
|
+
// SpecDiffer.res - Compare two OpenAPI specifications
|
|
4
|
+
open Types
|
|
5
|
+
|
|
6
|
+
// Helper to create endpoint keys for comparison
|
|
7
|
+
// Build the lookup key used to match endpoints across specs, e.g. "get:/users".
let makeEndpointKey = (method: string, path: string): string => method ++ ":" ++ path
|
|
10
|
+
|
|
11
|
+
// Compare two schemas for equality (deep comparison)
|
|
12
|
+
// Structural equality for two JSON schemas.
// Compares type, format, nullable, enum, and (recursively) object properties.
// NOTE(review): array `items` and `$ref` fields are not inspected here, so two
// schemas differing only in those are reported equal — confirm this is intended.
let rec schemasEqual = (schema1: jsonSchema, schema2: jsonSchema): bool => {
  let sameType = switch (schema1.type_, schema2.type_) {
  | (Some(a), Some(b)) => a == b
  | (None, None) => true
  | _ => false
  }

  // Deferred so property comparison only runs when the scalar fields match.
  let sameProperties = () =>
    switch (schema1.properties, schema2.properties) {
    | (Some(p1), Some(p2)) => {
        let names1 = Dict.keysToArray(p1)->Array.toSorted(String.compare)
        let names2 = Dict.keysToArray(p2)->Array.toSorted(String.compare)
        names1 == names2 &&
        names1->Array.every(name =>
          switch (Dict.get(p1, name), Dict.get(p2, name)) {
          | (Some(a), Some(b)) => schemasEqual(a, b)
          | _ => false
          }
        )
      }
    | (None, None) => true
    | _ => false
    }

  sameType &&
  schema1.format == schema2.format &&
  schema1.nullable == schema2.nullable &&
  schema1.enum == schema2.enum &&
  sameProperties()
}
|
|
51
|
+
|
|
52
|
+
// Compare two endpoints
|
|
53
|
+
// Compare two endpoints and report a diff when their request body or
// responses differ, or None when they are identical.
//
// A request body counts as changed when the content-type sets differ, or when
// any shared content type's schema fails schemasEqual. Responses count as
// changed when the status-code sets differ, or when any shared status code's
// content-type sets or schemas differ.
//
// breakingChange is derived from response changes only — request-body changes
// alone are reported but not flagged as breaking.
let compareEndpoints = (endpoint1: endpoint, endpoint2: endpoint): option<endpointDiff> => {
  let hasRequestBodyChanged = switch (endpoint1.requestBody, endpoint2.requestBody) {
  | (Some(rb1), Some(rb2)) => {
      // Compare content types
      let keys1 = Dict.keysToArray(rb1.content)->Array.toSorted(String.compare)
      let keys2 = Dict.keysToArray(rb2.content)->Array.toSorted(String.compare)

      if keys1 != keys2 {
        true
      } else {
        // Same content types: changed iff any shared media type's schema differs.
        // Missing schemas on either side are treated as "no change".
        keys1->Array.some(contentType => {
          switch (Dict.get(rb1.content, contentType), Dict.get(rb2.content, contentType)) {
          | (Some(mt1), Some(mt2)) =>
            switch (mt1.schema, mt2.schema) {
            | (Some(s1), Some(s2)) => !schemasEqual(s1, s2)
            | _ => false
            }
          | _ => false
          }
        })
      }
    }
  // Body added or removed on one side is a change; absent on both is not.
  | (None, None) => false
  | _ => true
  }

  let hasResponseChanged = {
    let codes1 = Dict.keysToArray(endpoint1.responses)->Array.toSorted(String.compare)
    let codes2 = Dict.keysToArray(endpoint2.responses)->Array.toSorted(String.compare)

    if codes1 != codes2 {
      true
    } else {
      // Same status codes: inspect each shared code's content map.
      codes1->Array.some(code => {
        switch (Dict.get(endpoint1.responses, code), Dict.get(endpoint2.responses, code)) {
        | (Some(r1), Some(r2)) =>
          switch (r1.content, r2.content) {
          | (Some(c1), Some(c2)) => {
              let contentKeys1 = Dict.keysToArray(c1)->Array.toSorted(String.compare)
              let contentKeys2 = Dict.keysToArray(c2)->Array.toSorted(String.compare)

              if contentKeys1 != contentKeys2 {
                true
              } else {
                contentKeys1->Array.some(contentType => {
                  switch (Dict.get(c1, contentType), Dict.get(c2, contentType)) {
                  | (Some(mt1), Some(mt2)) =>
                    switch (mt1.schema, mt2.schema) {
                    | (Some(s1), Some(s2)) => !schemasEqual(s1, s2)
                    | _ => false
                    }
                  | _ => false
                  }
                })
              }
            }
          // Content added or removed on one side is a change.
          | (None, None) => false
          | _ => true
          }
        | _ => false
        }
      })
    }
  }

  if hasRequestBodyChanged || hasResponseChanged {
    Some({
      path: endpoint1.path,
      method: endpoint1.method,
      requestBodyChanged: hasRequestBodyChanged,
      responseChanged: hasResponseChanged,
      breakingChange: hasResponseChanged, // Response changes are breaking
    })
  } else {
    None
  }
}
|
|
130
|
+
|
|
131
|
+
// Compare endpoints between two specs
|
|
132
|
+
// Partition fork endpoints relative to base endpoints into
// (added, removed, modified), keyed by "METHOD:path".
let compareEndpointLists = (
  baseEndpoints: array<endpoint>,
  forkEndpoints: array<endpoint>,
): (array<endpoint>, array<endpoint>, array<endpointDiff>) => {
  // Index a list of endpoints by their method:path key.
  let indexByKey = endpoints => {
    let index = Dict.make()
    endpoints->Array.forEach(ep => Dict.set(index, makeEndpointKey(ep.method, ep.path), ep))
    index
  }

  let baseMap = indexByKey(baseEndpoints)
  let forkMap = indexByKey(forkEndpoints)

  let baseKeys = Dict.keysToArray(baseMap)->Set.fromArray
  let forkKeys = Dict.keysToArray(forkMap)->Set.fromArray

  // Present only in the fork.
  let added =
    Set.difference(forkKeys, baseKeys)
    ->Set.toArray
    ->Array.filterMap(key => Dict.get(forkMap, key))

  // Present only in the base.
  let removed =
    Set.difference(baseKeys, forkKeys)
    ->Set.toArray
    ->Array.filterMap(key => Dict.get(baseMap, key))

  // Present in both, but with a differing request body or response.
  let modified =
    Set.intersection(baseKeys, forkKeys)
    ->Set.toArray
    ->Array.filterMap(key =>
      switch (Dict.get(baseMap, key), Dict.get(forkMap, key)) {
      | (Some(baseEp), Some(forkEp)) => compareEndpoints(baseEp, forkEp)
      | _ => None
      }
    )

  (added, removed, modified)
}
|
|
177
|
+
|
|
178
|
+
// Compare component schemas between two specs
|
|
179
|
+
// Compare component schema maps between two specs, returning
// (added names, removed names, modified schema diffs).
let compareComponentSchemas = (
  baseSchemas: option<dict<jsonSchema>>,
  forkSchemas: option<dict<jsonSchema>>,
): (array<string>, array<string>, array<schemaDiff>) => {
  switch (baseSchemas, forkSchemas) {
  | (None, None) => ([], [], [])
  | (None, Some(fork)) => (Dict.keysToArray(fork), [], [])
  | (Some(base), None) => ([], Dict.keysToArray(base), [])
  | (Some(base), Some(fork)) => {
      let baseNames = Dict.keysToArray(base)->Set.fromArray
      let forkNames = Dict.keysToArray(fork)->Set.fromArray

      // Only in the fork / only in the base.
      let added = Set.difference(forkNames, baseNames)->Set.toArray
      let removed = Set.difference(baseNames, forkNames)->Set.toArray

      // In both, with differing definitions. Any schema change is treated
      // as breaking.
      let modified =
        Set.intersection(baseNames, forkNames)
        ->Set.toArray
        ->Array.filterMap(name =>
          switch (Dict.get(base, name), Dict.get(fork, name)) {
          | (Some(baseSchema), Some(forkSchema)) if !schemasEqual(baseSchema, forkSchema) =>
            Some({name, breakingChange: true})
          | _ => None
          }
        )

      (added, removed, modified)
    }
  }
}
|
|
224
|
+
|
|
225
|
+
// Generate a complete diff between two specs
|
|
226
|
+
// Produce a complete diff between a base spec and a fork spec,
// covering both endpoints and component schemas.
let generateDiff = (
  ~baseSpec: openAPISpec,
  ~forkSpec: openAPISpec,
  ~baseEndpoints: array<endpoint>,
  ~forkEndpoints: array<endpoint>,
): specDiff => {
  // Endpoint-level differences.
  let (addedEndpoints, removedEndpoints, modifiedEndpoints) =
    compareEndpointLists(baseEndpoints, forkEndpoints)

  // Component-schema differences.
  let (addedSchemas, removedSchemas, modifiedSchemas) = compareComponentSchemas(
    baseSpec.components->Option.flatMap(c => c.schemas),
    forkSpec.components->Option.flatMap(c => c.schemas),
  )

  {
    addedEndpoints,
    removedEndpoints,
    modifiedEndpoints,
    addedSchemas,
    removedSchemas,
    modifiedSchemas,
  }
}
|
|
250
|
+
|
|
251
|
+
// Detect if there are any breaking changes
|
|
252
|
+
// True when the diff contains any breaking change: a removed endpoint,
// a modified endpoint flagged breaking, or a modified schema flagged breaking.
let hasBreakingChanges = (diff: specDiff): bool =>
  Array.length(diff.removedEndpoints) > 0 ||
  diff.modifiedEndpoints->Array.some(change => change.breakingChange) ||
  diff.modifiedSchemas->Array.some(change => change.breakingChange)
|
|
261
|
+
|
|
262
|
+
// Count total changes
|
|
263
|
+
// Total number of changes across every diff category.
let countChanges = (diff: specDiff): int =>
  [
    diff.addedEndpoints->Array.length,
    diff.removedEndpoints->Array.length,
    diff.modifiedEndpoints->Array.length,
    diff.addedSchemas->Array.length,
    diff.removedSchemas->Array.length,
    diff.modifiedSchemas->Array.length,
  ]->Array.reduce(0, (total, n) => total + n)
|
|
@@ -0,0 +1,245 @@
|
|
|
1
|
+
// SPDX-License-Identifier: MPL-2.0
|
|
2
|
+
|
|
3
|
+
// SpecMerger.res - Merge base and fork OpenAPI specifications
|
|
4
|
+
open Types
|
|
5
|
+
|
|
6
|
+
// Helper to create endpoint key
|
|
7
|
+
// Key an endpoint by its method and path, e.g. "get:/users".
let makeEndpointKey = (endpoint: endpoint): string => endpoint.method ++ ":" ++ endpoint.path
|
|
10
|
+
|
|
11
|
+
// Extract shared endpoints (present in both specs with same signature)
|
|
12
|
+
// For SharedBase strategy, this returns ALL base endpoints
|
|
13
|
+
// Return the endpoints that belong in the shared (base) spec.
//
// Under the SharedBase strategy the base is taken as authoritative and
// complete, so this is deliberately the identity on `baseEndpoints`;
// the fork list is accepted only to keep a symmetric signature.
let extractSharedEndpoints = (
  baseEndpoints: array<endpoint>,
  _forkEndpoints: array<endpoint>,
): array<endpoint> => {
  // Return ALL base endpoints - the base should be complete
  baseEndpoints
}
|
|
20
|
+
|
|
21
|
+
// Extract fork-specific endpoints (new AND modified endpoints)
|
|
22
|
+
// Return the fork endpoints that are either new (no base counterpart)
// or modified relative to the base. Identical endpoints are dropped.
let extractForkExtensions = (
  baseEndpoints: array<endpoint>,
  forkEndpoints: array<endpoint>,
): array<endpoint> => {
  // Index the base by "METHOD:path" for O(1) lookups.
  let baseByKey = Dict.make()
  baseEndpoints->Array.forEach(ep => Dict.set(baseByKey, makeEndpointKey(ep), ep))

  forkEndpoints->Array.filter(forkEp =>
    switch Dict.get(baseByKey, makeEndpointKey(forkEp)) {
    | None => true // new in the fork
    | Some(baseEp) =>
      // A Some(...) diff means the endpoint was modified.
      SpecDiffer.compareEndpoints(baseEp, forkEp)->Option.isSome
    }
  )
}
|
|
48
|
+
|
|
49
|
+
// Extract shared component schemas (ALL base schemas for SharedBase strategy)
|
|
50
|
+
// Return the component schemas that belong in the shared (base) spec.
//
// Mirrors extractSharedEndpoints: under the SharedBase strategy the base
// schema set is authoritative, so this is the identity on `baseSchemas`.
let extractSharedSchemas = (
  baseSchemas: option<dict<jsonSchema>>,
  _forkSchemas: option<dict<jsonSchema>>,
): option<dict<jsonSchema>> => {
  // Return ALL base schemas - the base should be complete
  baseSchemas
}
|
|
57
|
+
|
|
58
|
+
// Extract fork-specific component schemas
|
|
59
|
+
// Return the fork component schemas that are new or modified relative to
// the base. Yields None when the fork has no schemas, or none differ.
let extractForkSchemas = (
  baseSchemas: option<dict<jsonSchema>>,
  forkSchemas: option<dict<jsonSchema>>,
): option<dict<jsonSchema>> => {
  switch (baseSchemas, forkSchemas) {
  | (_, None) => None
  | (None, Some(fork)) => Some(fork) // no base: every fork schema is an extension
  | (Some(base), Some(fork)) => {
      let picked = Dict.make()

      Dict.keysToArray(fork)->Array.forEach(name =>
        switch Dict.get(fork, name) {
        | None => () // unreachable: name came from fork's own keys
        | Some(forkSchema) =>
          switch Dict.get(base, name) {
          | None => Dict.set(picked, name, forkSchema) // new in fork
          | Some(baseSchema) =>
            if !SpecDiffer.schemasEqual(baseSchema, forkSchema) {
              Dict.set(picked, name, forkSchema) // modified in fork
            }
          }
        }
      )

      Dict.keysToArray(picked)->Array.length > 0 ? Some(picked) : None
    }
  }
}
|
|
93
|
+
|
|
94
|
+
// Create a new spec with given endpoints
|
|
95
|
+
// Build a new spec from `baseSpec` whose paths contain exactly `endpoints`
// and whose components carry exactly `schemas`.
//
// Endpoints are grouped by path; each endpoint's operation is attached to
// the matching HTTP-method slot of its path item.
let createSpecWithEndpoints = (
  ~baseSpec: openAPISpec,
  ~endpoints: array<endpoint>,
  ~schemas: option<dict<jsonSchema>>,
): openAPISpec => {
  // Group endpoints by path
  let pathsDict = Dict.make()

  endpoints->Array.forEach(ep => {
    // Reuse the accumulated path item, or start from an empty one.
    let pathItem = switch Dict.get(pathsDict, ep.path) {
    | Some(existing) => existing
    | None => {
        get: None,
        post: None,
        put: None,
        delete: None,
        patch: None,
        head: None,
        options: None,
        parameters: None,
      }
    }

    let operation: operation = {
      operationId: ep.operationId,
      summary: ep.summary,
      description: ep.description,
      tags: ep.tags,
      parameters: ep.parameters,
      requestBody: ep.requestBody,
      responses: ep.responses,
    }

    // Attach the operation to its method slot. Fix: "head" and "options"
    // were previously missing here even though the path item has those
    // slots, so HEAD/OPTIONS endpoints were silently dropped.
    let updatedPathItem = switch String.toLowerCase(ep.method) {
    | "get" => {...pathItem, get: Some(operation)}
    | "post" => {...pathItem, post: Some(operation)}
    | "put" => {...pathItem, put: Some(operation)}
    | "delete" => {...pathItem, delete: Some(operation)}
    | "patch" => {...pathItem, patch: Some(operation)}
    | "head" => {...pathItem, head: Some(operation)}
    | "options" => {...pathItem, options: Some(operation)}
    | _ => pathItem // unknown method: leave the path item untouched
    }

    Dict.set(pathsDict, ep.path, updatedPathItem)
  })

  // Replace component schemas wholesale with the provided set.
  // NOTE(review): only `schemas` is carried over; if the components type
  // ever grows more fields, they would be dropped here — confirm.
  let components = switch (baseSpec.components, schemas) {
  | (Some(_comp), Some(sch)) => Some({schemas: Some(sch)})
  | (Some(_comp), None) => Some({schemas: None})
  | (None, Some(sch)) => Some({
      schemas: Some(sch),
    })
  | (None, None) => None
  }

  {
    ...baseSpec,
    paths: pathsDict,
    components: components,
  }
}
|
|
156
|
+
|
|
157
|
+
// Merge two specs using SharedBase strategy
|
|
158
|
+
// SharedBase merge: the shared spec carries the complete base, while the
// extensions spec carries only what the fork adds or changes.
let mergeWithSharedBase = (
  ~baseSpec: openAPISpec,
  ~forkSpec: openAPISpec,
  ~baseEndpoints: array<endpoint>,
  ~forkEndpoints: array<endpoint>,
): (openAPISpec, openAPISpec) => {
  let baseSchemas = baseSpec.components->Option.flatMap(c => c.schemas)
  let forkSchemas = forkSpec.components->Option.flatMap(c => c.schemas)

  // Shared spec: every base endpoint and schema.
  let sharedSpec = createSpecWithEndpoints(
    ~baseSpec,
    ~endpoints=extractSharedEndpoints(baseEndpoints, forkEndpoints),
    ~schemas=extractSharedSchemas(baseSchemas, forkSchemas),
  )

  // Extensions spec: fork-only and fork-modified endpoints and schemas,
  // built on top of the fork spec's metadata.
  let extensionsSpec = createSpecWithEndpoints(
    ~baseSpec=forkSpec,
    ~endpoints=extractForkExtensions(baseEndpoints, forkEndpoints),
    ~schemas=extractForkSchemas(baseSchemas, forkSchemas),
  )

  (sharedSpec, extensionsSpec)
}
|
|
191
|
+
|
|
192
|
+
// Merge two specs using Separate strategy (keep both complete)
|
|
193
|
+
// Separate strategy: keep both specs complete and untouched.
let mergeWithSeparate = (
  ~baseSpec: openAPISpec,
  ~forkSpec: openAPISpec,
): (openAPISpec, openAPISpec) => (baseSpec, forkSpec)
|
|
200
|
+
|
|
201
|
+
// Merge specs according to strategy
|
|
202
|
+
// Dispatch the merge according to the configured generation strategy.
let mergeSpecs = (
  ~baseSpec: openAPISpec,
  ~forkSpec: openAPISpec,
  ~baseEndpoints: array<endpoint>,
  ~forkEndpoints: array<endpoint>,
  ~strategy: generationStrategy,
): (openAPISpec, openAPISpec) =>
  switch strategy {
  | Separate => mergeWithSeparate(~baseSpec, ~forkSpec)
  | SharedBase => mergeWithSharedBase(~baseSpec, ~forkSpec, ~baseEndpoints, ~forkEndpoints)
  }
|
|
215
|
+
|
|
216
|
+
// Calculate merge statistics
|
|
217
|
+
// Summary counts produced by a SharedBase merge.
type mergeStats = {
  // Endpoints kept in the shared (base) spec.
  sharedEndpointCount: int,
  // Fork endpoints that are new or modified relative to the base.
  forkExtensionCount: int,
  // Component schemas kept in the shared spec.
  sharedSchemaCount: int,
  // Fork component schemas that are new or modified relative to the base.
  forkSchemaCount: int,
}
|
|
223
|
+
|
|
224
|
+
// Compute merge statistics without materializing the merged specs.
let getMergeStats = (
  ~baseEndpoints: array<endpoint>,
  ~forkEndpoints: array<endpoint>,
  ~baseSchemas: option<dict<jsonSchema>>,
  ~forkSchemas: option<dict<jsonSchema>>,
): mergeStats => {
  // Count entries of an optional schema dict (0 when absent).
  let countSchemas = maybeSchemas =>
    maybeSchemas->Option.mapOr(0, s => Dict.keysToArray(s)->Array.length)

  {
    sharedEndpointCount: extractSharedEndpoints(baseEndpoints, forkEndpoints)->Array.length,
    forkExtensionCount: extractForkExtensions(baseEndpoints, forkEndpoints)->Array.length,
    sharedSchemaCount: extractSharedSchemas(baseSchemas, forkSchemas)->countSchemas,
    forkSchemaCount: extractForkSchemas(baseSchemas, forkSchemas)->countSchemas,
  }
}
|
|
@@ -0,0 +1,127 @@
|
|
|
1
|
+
// SPDX-License-Identifier: MPL-2.0
|
|
2
|
+
|
|
3
|
+
// ComponentSchemaGenerator.res - Generate shared component schema module
|
|
4
|
+
open Types
|
|
5
|
+
|
|
6
|
+
// Collect the names of component schemas referenced (directly or through
// arrays, objects, unions, intersections, and options) by an IR type.
// May contain duplicates; callers should dedupe if they need a set.
let rec extractReferencedSchemaNames = (irType: SchemaIR.irType) =>
  switch irType {
  | Reference(ref) => {
      // "#/components/schemas/Foo" -> "Foo" (last path segment).
      let segments = ref->String.split("/")
      [segments->Array.get(Array.length(segments) - 1)->Option.getOr("")]
    }
  | Array({items}) => extractReferencedSchemaNames(items)
  | Object({properties}) =>
    properties->Array.flatMap(((_fieldName, fieldType, _)) =>
      extractReferencedSchemaNames(fieldType)
    )
  | Union(members)
  | Intersection(members) =>
    members->Array.flatMap(extractReferencedSchemaNames)
  | Option(inner) => extractReferencedSchemaNames(inner)
  | _ => []
  }
|
|
19
|
+
|
|
20
|
+
// Generate ComponentSchemas.res: one ReScript module per component schema,
// emitted in dependency order so every module only refers to earlier ones.
// Returns an empty pipeline when the spec declares no component schemas.
let generate = (~spec, ~outputDir) => {
  // Parse #/components/schemas into the intermediate representation.
  let (context, parseWarnings) =
    spec.components
    ->Option.flatMap(components => components.schemas)
    ->Option.mapOr(({SchemaIR.schemas: Dict.make()}, []), schemas =>
      SchemaIRParser.parseComponentSchemas(schemas)
    )

  if Dict.size(context.schemas) == 0 {
    Pipeline.empty
  } else {
    let schemas = Dict.valuesToArray(context.schemas)
    let schemaNameMap = Dict.fromArray(schemas->Array.map(s => (s.name, s)))

    // Map each schema to its internal dependencies (other schemas in the same
    // spec). Fix: deduplicate references — a schema referencing the same
    // dependency more than once previously inflated its in-degree, while the
    // Kahn loop below decrements only once per processed dependency, so the
    // schema could never reach degree 0 and fell into the alphabetical
    // fallback, losing the topological order.
    let dependencyMap = schemas->Array.reduce(Dict.make(), (acc, schema) => {
      let references =
        extractReferencedSchemaNames(schema.type_)
        ->Array.filter(name => Dict.has(schemaNameMap, name) && name != schema.name)
        ->Set.fromArray
        ->Set.toArray
      Dict.set(acc, schema.name, references)
      acc
    })

    // Topological sort (Kahn's algorithm): in-degree = number of unresolved
    // dependencies; schemas with zero dependencies seed the queue.
    let sortedSchemas = []
    let inDegreeMap = schemas->Array.reduce(Dict.make(), (acc, schema) => {
      let degree = Dict.get(dependencyMap, schema.name)->Option.mapOr(0, Array.length)
      Dict.set(acc, schema.name, degree)
      acc
    })

    let queue = schemas->Array.filter(schema =>
      Dict.get(inDegreeMap, schema.name)->Option.getOr(0) == 0
    )

    while Array.length(queue) > 0 {
      let schema = switch Array.shift(queue) {
      | Some(v) => v
      | None => schemas->Array.getUnsafe(0) // unreachable: loop guard ensures a head element
      }
      sortedSchemas->Array.push(schema)

      // Resolve this schema for every dependent; enqueue dependents that
      // have no unresolved dependencies left.
      schemas->Array.forEach(otherSchema => {
        let dependsOnCurrent =
          Dict.get(dependencyMap, otherSchema.name)
          ->Option.getOr([])
          ->Array.some(name => name == schema.name)

        if dependsOnCurrent {
          let currentDegree = Dict.get(inDegreeMap, otherSchema.name)->Option.getOr(0)
          let newDegree = currentDegree - 1
          Dict.set(inDegreeMap, otherSchema.name, newDegree)
          if newDegree == 0 {
            queue->Array.push(otherSchema)
          }
        }
      })
    }

    // Schemas in a dependency cycle never reach degree 0; append them in
    // name order so they are still emitted.
    let sortedNames = sortedSchemas->Array.map(s => s.name)
    let remainingSchemas =
      schemas
      ->Array.filter(s => !(sortedNames->Array.some(name => name == s.name)))
      ->Array.toSorted((a, b) => String.compare(a.name, b.name))

    let finalSortedSchemas = Array.concat(sortedSchemas, remainingSchemas)
    let availableSchemaNames = finalSortedSchemas->Array.map(s => s.name)
    let warnings = Array.copy(parseWarnings)

    // Emit one module per schema; separate contexts keep type-generation
    // warnings distinct from sury-schema-generation warnings.
    let moduleCodes = finalSortedSchemas->Array.map(schema => {
      let typeCtx = GenerationContext.make(
        ~path=`ComponentSchemas.${schema.name}`,
        ~insideComponentSchemas=true,
        ~availableSchemas=availableSchemaNames,
        (),
      )
      let schemaCtx = GenerationContext.make(
        ~path=`ComponentSchemas.${schema.name}`,
        ~insideComponentSchemas=true,
        ~availableSchemas=availableSchemaNames,
        (),
      )

      let typeCode = IRToTypeGenerator.generateTypeWithContext(~ctx=typeCtx, ~depth=0, schema.type_)
      let schemaCode = IRToSuryGenerator.generateSchemaWithContext(~ctx=schemaCtx, ~depth=0, schema.type_)

      warnings->Array.pushMany(typeCtx.warnings)
      warnings->Array.pushMany(schemaCtx.warnings)

      let docComment = schema.description->Option.mapOr("", d =>
        CodegenUtils.generateDocString(~description=d, ())
      )

      `${docComment}module ${CodegenUtils.toPascalCase(schema.name)} = {
  type t = ${typeCode}
  let schema = ${schemaCode}
}`
    })

    let fileHeader = CodegenUtils.generateFileHeader(~description="Shared component schemas")
    let fileContent = `${fileHeader}\n\n${moduleCodes->Array.join("\n\n")}`

    Pipeline.fromFilesAndWarnings(
      [{path: FileSystem.makePath(outputDir, "ComponentSchemas.res"), content: fileContent}],
      warnings,
    )
  }
}
|