@osdk/vite-plugin-oac 0.1.0-beta.0 → 0.1.0-beta.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +23 -0
- package/build/browser/Logger.js +64 -0
- package/build/browser/Logger.js.map +1 -0
- package/build/browser/OacConfig.js +22 -0
- package/build/browser/OacConfig.js.map +1 -0
- package/build/browser/OacContext.js +42 -0
- package/build/browser/OacContext.js.map +1 -0
- package/build/browser/OacDevServer.js +80 -0
- package/build/browser/OacDevServer.js.map +1 -0
- package/build/browser/OacServerContext.js +26 -0
- package/build/browser/OacServerContext.js.map +1 -0
- package/build/browser/applyOntologyAndSeed.js +38 -0
- package/build/browser/applyOntologyAndSeed.js.map +1 -0
- package/build/browser/generateOntologyAssets.js +81 -87
- package/build/browser/generateOntologyAssets.js.map +1 -1
- package/build/browser/index.js +2 -52
- package/build/browser/index.js.map +1 -1
- package/build/browser/ontologyAsCode.js +58 -0
- package/build/browser/ontologyAsCode.js.map +1 -0
- package/build/browser/registerOntologyFullMetadata.js +90 -57
- package/build/browser/registerOntologyFullMetadata.js.map +1 -1
- package/build/browser/syncDirectories.js +11 -34
- package/build/browser/syncDirectories.js.map +1 -1
- package/build/cjs/index.cjs +487 -414
- package/build/cjs/index.cjs.map +1 -1
- package/build/cjs/index.d.cts +20 -6
- package/build/esm/Logger.js +64 -0
- package/build/esm/Logger.js.map +1 -0
- package/build/esm/OacConfig.js +22 -0
- package/build/esm/OacConfig.js.map +1 -0
- package/build/esm/OacContext.js +42 -0
- package/build/esm/OacContext.js.map +1 -0
- package/build/esm/OacDevServer.js +80 -0
- package/build/esm/OacDevServer.js.map +1 -0
- package/build/esm/OacServerContext.js +26 -0
- package/build/esm/OacServerContext.js.map +1 -0
- package/build/esm/applyOntologyAndSeed.js +38 -0
- package/build/esm/applyOntologyAndSeed.js.map +1 -0
- package/build/esm/generateOntologyAssets.js +81 -87
- package/build/esm/generateOntologyAssets.js.map +1 -1
- package/build/esm/index.js +2 -52
- package/build/esm/index.js.map +1 -1
- package/build/esm/ontologyAsCode.js +58 -0
- package/build/esm/ontologyAsCode.js.map +1 -0
- package/build/esm/registerOntologyFullMetadata.js +90 -57
- package/build/esm/registerOntologyFullMetadata.js.map +1 -1
- package/build/esm/syncDirectories.js +11 -34
- package/build/esm/syncDirectories.js.map +1 -1
- package/build/types/Logger.d.ts +18 -0
- package/build/types/Logger.d.ts.map +1 -0
- package/build/types/OacConfig.d.ts +12 -0
- package/build/types/OacConfig.d.ts.map +1 -0
- package/build/types/OacContext.d.ts +12 -0
- package/build/types/OacContext.d.ts.map +1 -0
- package/build/types/OacDevServer.d.ts +10 -0
- package/build/types/OacDevServer.d.ts.map +1 -0
- package/build/types/OacServerContext.d.ts +12 -0
- package/build/types/OacServerContext.d.ts.map +1 -0
- package/build/types/applyOntologyAndSeed.d.ts +3 -0
- package/build/types/applyOntologyAndSeed.d.ts.map +1 -0
- package/build/types/generateOntologyAssets.d.ts +4 -7
- package/build/types/generateOntologyAssets.d.ts.map +1 -1
- package/build/types/index.d.ts +2 -11
- package/build/types/index.d.ts.map +1 -1
- package/build/types/ontologyAsCode.d.ts +7 -0
- package/build/types/ontologyAsCode.d.ts.map +1 -0
- package/build/types/registerOntologyFullMetadata.d.ts.map +1 -1
- package/build/types/syncDirectories.d.ts +1 -1
- package/build/types/syncDirectories.d.ts.map +1 -1
- package/package.json +10 -7
- package/build/browser/FauxFoundryTypes.js +0 -2
- package/build/browser/FauxFoundryTypes.js.map +0 -1
- package/build/browser/FoundryMiddlewareController.js +0 -88
- package/build/browser/FoundryMiddlewareController.js.map +0 -1
- package/build/browser/watchOntologyAsCode.js +0 -69
- package/build/browser/watchOntologyAsCode.js.map +0 -1
- package/build/esm/FauxFoundryTypes.js +0 -2
- package/build/esm/FauxFoundryTypes.js.map +0 -1
- package/build/esm/FoundryMiddlewareController.js +0 -88
- package/build/esm/FoundryMiddlewareController.js.map +0 -1
- package/build/esm/watchOntologyAsCode.js +0 -69
- package/build/esm/watchOntologyAsCode.js.map +0 -1
- package/build/types/FauxFoundryTypes.d.ts +0 -2
- package/build/types/FauxFoundryTypes.d.ts.map +0 -1
- package/build/types/FoundryMiddlewareController.d.ts +0 -14
- package/build/types/FoundryMiddlewareController.d.ts.map +0 -1
- package/build/types/watchOntologyAsCode.d.ts +0 -10
- package/build/types/watchOntologyAsCode.d.ts.map +0 -1
package/build/cjs/index.cjs
CHANGED

@@ -1,14 +1,18 @@
  'use strict';

- var
+ var fs = require('fs');
+ var generatorConverters_ontologyir = require('@osdk/generator-converters.ontologyir');
+ var execa = require('execa');
+ var path = require('path');
+ var util = require('util');
+ var crypto2 = require('crypto');
  var faux = require('@osdk/faux');
+ var chalk = require('chalk');
  var EventEmitter = require('events');
  var jiti = require('jiti');
+ var invariant = require('tiny-invariant');
+ var Emittery = require('emittery');
  var stream = require('stream');
- var fs2 = require('fs');
- var generatorConverters_ontologyir = require('@osdk/generator-converters.ontologyir');
- var execa = require('execa');
- var crypto2 = require('crypto');

  var _documentCurrentScript = typeof document !== 'undefined' ? document.currentScript : null;
  function _interopDefault (e) { return e && e.__esModule ? e : { default: e }; }
@@ -31,265 +35,24 @@ function _interopNamespace(e) {
  return Object.freeze(n);
  }

- var
- var
- var
+ var fs__namespace = /*#__PURE__*/_interopNamespace(fs);
+ var path__namespace = /*#__PURE__*/_interopNamespace(path);
+ var util__namespace = /*#__PURE__*/_interopNamespace(util);
  var crypto2__default = /*#__PURE__*/_interopDefault(crypto2);
+ var chalk__default = /*#__PURE__*/_interopDefault(chalk);
+ var EventEmitter__default = /*#__PURE__*/_interopDefault(EventEmitter);
+ var invariant__default = /*#__PURE__*/_interopDefault(invariant);
+ var Emittery__default = /*#__PURE__*/_interopDefault(Emittery);

- // src/
-
-
-
- debug: false,
- importMeta: ({ url: (typeof document === 'undefined' ? require('u' + 'rl').pathToFileURL(__filename).href : (_documentCurrentScript && _documentCurrentScript.tagName.toUpperCase() === 'SCRIPT' && _documentCurrentScript.src || new URL('index.cjs', document.baseURI).href)) })
- });
- const module = await jiti$1.import(seedPath);
- module.default(fauxFoundry);
- }
-
- // src/registerOntologyFullMetadata.ts
- function registerOntologyFullMetadata(ontology, ontologyFullMetadata) {
- Object.values(ontologyFullMetadata.objectTypes).forEach((objectType) => {
- ontology.registerObjectType(objectType);
- });
- Object.values(ontologyFullMetadata.actionTypes).forEach((actionType) => {
- const implementation = createActionImplementation(actionType);
- const actionTypeWithCamelCaseApiName = {
- ...actionType,
- apiName: camelcase(actionType.apiName)
- };
- ontology.registerActionType(actionTypeWithCamelCaseApiName, implementation);
- });
- Object.values(ontologyFullMetadata.sharedPropertyTypes).forEach((actionType) => {
- ontology.registerSharedPropertyType(actionType);
- });
- Object.values(ontologyFullMetadata.queryTypes).forEach((query) => {
- ontology.registerQueryType(query);
- });
- Object.values(ontologyFullMetadata.interfaceTypes).forEach((iface) => {
- ontology.registerInterfaceType(iface);
- });
- }
- function createActionImplementation(actionType) {
- return (batch, payload, _ctx) => {
- const params = payload.parameters;
- for (const operation of actionType.operations) {
- switch (operation.type) {
- case "createObject": {
- const objectTypeApiName = operation.objectTypeApiName;
- const primaryKey = params.primaryKey_;
- const objectData = {};
- for (const [key, value] of Object.entries(params)) {
- if (key !== "primaryKey_") {
- const param = actionType.parameters[key];
- objectData[key] = toDataValue(value, param);
- }
- }
- batch.addObject(objectTypeApiName, primaryKey, objectData);
- break;
- }
- case "modifyObject": {
- let objectTypeApiName;
- let primaryKey;
- if (typeof operation.objectTypeApiName === "string") {
- objectTypeApiName = operation.objectTypeApiName;
- primaryKey = params.primaryKey_;
- } else {
- const objectToModify = params[operation.objectTypeApiName];
- if (objectToModify) {
- objectTypeApiName = objectToModify.objectTypeApiName || objectToModify;
- primaryKey = objectToModify.primaryKeyValue || params.primaryKey_;
- } else {
- objectTypeApiName = operation.objectTypeApiName;
- primaryKey = params.primaryKey_;
- }
- }
- const objectData = {};
- for (const [key, value] of Object.entries(params)) {
- if (key !== "primaryKey_" && key !== "objectToModifyParameter") {
- const param = actionType.parameters[key];
- objectData[key] = toDataValue(value, param);
- }
- }
- batch.modifyObject(objectTypeApiName, primaryKey, objectData);
- break;
- }
- case "deleteObject": {
- let objectTypeApiName;
- let primaryKey;
- if (typeof operation.objectTypeApiName === "string") {
- objectTypeApiName = operation.objectTypeApiName;
- primaryKey = params.primaryKey_;
- } else {
- const objectToDelete = params[operation.objectTypeApiName];
- if (objectToDelete) {
- objectTypeApiName = objectToDelete.objectTypeApiName || objectToDelete;
- primaryKey = objectToDelete.primaryKeyValue || params.primaryKey_;
- } else {
- objectTypeApiName = operation.objectTypeApiName;
- primaryKey = params.primaryKey_;
- }
- }
- batch.deleteObject(objectTypeApiName, primaryKey);
- break;
- }
- case "createLink": {
- const aSideObjectTypeApiName = operation.aSideObjectTypeApiName;
- const bSideObjectTypeApiName = operation.bSideObjectTypeApiName;
- const linkTypeApiNameAtoB = operation.linkTypeApiNameAtoB;
- const aSidePrimaryKey = params.aSidePrimaryKey || params.primaryKey_;
- const bSidePrimaryKey = params.bSidePrimaryKey || params.linkedObjectPrimaryKey;
- if (aSidePrimaryKey && bSidePrimaryKey) {
- batch.addLink(aSideObjectTypeApiName, aSidePrimaryKey, linkTypeApiNameAtoB, bSideObjectTypeApiName, bSidePrimaryKey);
- }
- break;
- }
- case "deleteLink": {
- const aSideObjectTypeApiName = operation.aSideObjectTypeApiName;
- const bSideObjectTypeApiName = operation.bSideObjectTypeApiName;
- const linkTypeApiNameAtoB = operation.linkTypeApiNameAtoB;
- const aSidePrimaryKey = params.aSidePrimaryKey || params.primaryKey_;
- const bSidePrimaryKey = params.bSidePrimaryKey || params.linkedObjectPrimaryKey;
- if (aSidePrimaryKey && bSidePrimaryKey) {
- batch.removeLink(aSideObjectTypeApiName, aSidePrimaryKey, linkTypeApiNameAtoB, bSideObjectTypeApiName, bSidePrimaryKey);
- }
- break;
- }
- // Handle other operation types as needed
- case "createInterfaceObject":
- case "modifyInterfaceObject":
- case "deleteInterfaceObject":
- throw new Error(`Operation type ${operation.type} not implemented yet`);
- default:
- throw new Error(`Unknown operation type: ${operation.type}`);
- }
- }
- };
- }
- function camelcase(apiName) {
- return apiName.toLowerCase().replace(/[-_]+(.)?/g, (_, chr) => chr ? chr.toUpperCase() : "");
- }
- function toDataValue(value, param) {
- if (param.dataType.type === "geoshape" && typeof value === "string") {
- return latLonStringToGeoJSON(value);
- }
- return value;
- }
- function latLonStringToGeoJSON(latLonStr) {
- const [lat, lon] = latLonStr.split(",").map(Number);
- if (isNaN(lat) || isNaN(lon) || lat < -90 || lat > 90 || lon < -180 || lon > 180) {
- throw new Error("Invalid latitude or longitude");
- }
- return {
- type: "Point",
- coordinates: [lon, lat]
- // GeoJSON uses [longitude, latitude]
- };
- }
- async function routeConnectToMsw(baseUrl, handlers, emitter, req, res, next) {
- const method = req.method ?? "GET";
- const canRequestHaveBody = method !== "HEAD" && method !== "GET";
- const mockRequest = new Request(new URL(req.url, baseUrl), {
- method,
- headers: new Headers(req.headers),
- credentials: "omit",
- // @ts-expect-error Internal Undici property.
- duplex: canRequestHaveBody ? "half" : void 0,
- body: canRequestHaveBody ? stream.Readable.toWeb(req) : void 0
- });
- await faux.msw.handleRequest(mockRequest, crypto.randomUUID(), handlers, {
- onUnhandledRequest: "bypass"
- }, emitter, {
- resolutionContext: {
- baseUrl
- },
- // eslint-disable-next-line @typescript-eslint/require-await
- async onMockedResponse(mockedResponse) {
- const {
- status,
- statusText,
- headers
- } = mockedResponse;
- res.statusCode = status;
- res.statusMessage = statusText;
- headers.forEach((value, name) => {
- res.appendHeader(name, value);
- });
- if (mockedResponse.body) {
- const stream$1 = stream.Readable.fromWeb(mockedResponse.body);
- stream$1.pipe(res);
- } else {
- res.end();
- }
- },
- onPassthroughResponse() {
- next();
- }
- });
- }
- function readJsonFile(arg0) {
- const content = fs2__namespace.readFileSync(arg0, "utf-8");
- return JSON.parse(content);
- }
-
- // src/FoundryMiddlewareController.ts
- var FoundryMiddlewareController = class {
- #fauxFoundry;
- #serverUrl;
- #defaultOntologyRid;
- mswEmitter = new EventEmitter__default.default();
- #hooks;
- constructor(serverUrl, defaultOntologyRid, oacEmitter, hooks) {
- this.#serverUrl = serverUrl;
- this.#defaultOntologyRid = defaultOntologyRid;
- this.#hooks = hooks;
- this.#fauxFoundry = this.#createNewFauxFoundry();
- oacEmitter.on("generated", () => {
- this.#reloadOntologyDefinition().catch((e) => {
- console.error("[oac] error reloading ontology definition", e);
- });
- });
- }
- #debugLog(message, ...optionalParams) {
- console.log("[oac]", message, ...optionalParams);
+ // src/OacConfig.ts
+ var OacConfig = class {
+ constructor(config) {
+ Object.assign(this, config);
  }
- #infoLog(message, ...optionalParams) {
- console.log("[oac]", message, ...optionalParams);
- }
- #errorLog(message, ...optionalParams) {
- console.error("[oac]", message, ...optionalParams);
- }
- #createNewFauxFoundry() {
- return new faux.FauxFoundry(this.#serverUrl, {
- apiName: "DefaultOntology",
- description: "Description",
- displayName: "Ontology",
- rid: this.#defaultOntologyRid
- });
- }
- async #reloadOntologyDefinition() {
- const fauxFoundry = this.#fauxFoundry = this.#createNewFauxFoundry();
- const ontology = fauxFoundry.getDefaultOntology();
- const ontologyFullMetadata = readJsonFile(".ontology.json");
- registerOntologyFullMetadata(ontology, ontologyFullMetadata);
- try {
- if (this.#hooks?.preSeed) {
- this.#debugLog("[oac] calling preSeed hook");
- }
- await this.#hooks?.preSeed?.(ontology);
- } catch (e) {
- this.#errorLog("[oac] Unhandled error from preSeed hook. Ignoring and continuing.", e);
- }
- this.#debugLog("[osdk] applying seed data");
- await applySeed(this.#fauxFoundry, path2__default.default.resolve(undefined, "..", "..", ".ontology", "seed.ts"));
- this.#infoLog("[osdk] Finished reloading ontology & seed data");
- }
- middleware = async (req, res, next) => {
- return void await routeConnectToMsw(this.#serverUrl, this.#fauxFoundry.handlers, this.mswEmitter, req, res, next);
- };
  };
  async function syncDirectories(sourceDir, targetDir, logger) {
-
+ logger.debug(`Synchronizing ${sourceDir} to ${targetDir}`);
+ await fs__namespace.default.promises.mkdir(targetDir, {
  recursive: true
  });
  const [sourceFiles, targetFiles] = await Promise.all([getAllFiles(sourceDir), getAllFiles(targetDir)]);
@@ -299,84 +62,71 @@ async function syncDirectories(sourceDir, targetDir, logger) {
  let unchangedCount = 0;
  const errors = [];
  for (const relativeFile of sourceFiles) {
- const sourceFile =
- const targetFile =
+ const sourceFile = path__namespace.default.join(sourceDir, relativeFile);
+ const targetFile = path__namespace.default.join(targetDir, relativeFile);
  try {
  const targetExists = targetFiles.includes(relativeFile);
  const isDifferent = await areFilesDifferent(sourceFile, targetFile);
  if (!targetExists) {
- await
+ await fs__namespace.default.promises.mkdir(path__namespace.default.dirname(targetFile), {
  recursive: true
  });
- await
+ await fs__namespace.default.promises.copyFile(sourceFile, targetFile);
  addedCount++;
- logger.
- timestamp: true
- });
+ logger.debug(`Added: ${relativeFile}`);
  } else if (isDifferent) {
- await
+ await fs__namespace.default.promises.copyFile(sourceFile, targetFile);
  updatedCount++;
- logger.
- timestamp: true
- });
+ logger.debug(`Updated: ${targetFile}`);
  } else {
  unchangedCount++;
  }
  } catch (error) {
  const errorMsg = `Failed to sync ${relativeFile}: ${error instanceof Error ? error.message : String(error)}`;
  errors.push(errorMsg);
- logger.error(errorMsg
- timestamp: true
- });
+ logger.error(errorMsg);
  }
  }
  for (const relativeFile of targetFiles) {
  if (!sourceFiles.includes(relativeFile)) {
- const targetFile =
+ const targetFile = path__namespace.default.join(targetDir, relativeFile);
  try {
- await
+ await fs__namespace.default.promises.unlink(targetFile);
  removedCount++;
- logger.
- timestamp: true
- });
+ logger.debug(`Removed: ${relativeFile}`);
  } catch (error) {
  const errorMsg = `Failed to remove ${relativeFile}: ${error instanceof Error ? error.message : String(error)}`;
  errors.push(errorMsg);
- logger.error(errorMsg
- timestamp: true
- });
+ logger.error(errorMsg);
  }
  }
  }
  try {
  await removeEmptyDirectories(targetDir);
  } catch (error) {
- logger.warn(`Failed to clean up empty directories: ${error instanceof Error ? error.message : String(error)}
- timestamp: true
- });
+ logger.warn(`Failed to clean up empty directories: ${error instanceof Error ? error.message : String(error)}`);
  }
+ logger.debug(`Sync complete: ${addedCount} added, ${updatedCount} updated, ${removedCount} removed, ${unchangedCount} unchanged`);
  if (errors.length > 0) {
- logger.warn(`Encountered ${errors.length} errors during sync
- timestamp: true
- });
+ logger.warn(`Encountered ${errors.length} errors during sync`);
  }
  }
  async function removeEmptyDirectories(dir) {
  try {
- const entries = await
+ const entries = await fs__namespace.default.promises.readdir(dir, {
  withFileTypes: true
  });
  for (const entry of entries) {
  if (entry.isDirectory()) {
- const subdir =
+ const subdir = path__namespace.default.join(dir, entry.name);
  await removeEmptyDirectories(subdir);
  }
  }
- const remainingEntries = await
+ const remainingEntries = await fs__namespace.default.promises.readdir(dir);
  if (remainingEntries.length === 0) {
  const targetDir = ".osdk/src";
- if (
- await
+ if (path__namespace.default.resolve(dir) !== path__namespace.default.resolve(targetDir)) {
+ await fs__namespace.default.promises.rmdir(dir);
  }
  }
  } catch (error) {
@@ -384,7 +134,7 @@ async function removeEmptyDirectories(dir) {
  }
  async function getFileHash(filePath) {
  try {
- const content = await
+ const content = await fs__namespace.default.promises.readFile(filePath);
  return crypto2__default.default.createHash("sha256").update(content).digest("hex");
  } catch (error) {
  return "";
@@ -401,12 +151,12 @@ async function areFilesDifferent(sourceFile, targetFile) {
  async function getAllFiles(dir, baseDir = dir) {
  const files = [];
  try {
- const entries = await
+ const entries = await fs__namespace.default.promises.readdir(dir, {
  withFileTypes: true
  });
  for (const entry of entries) {
- const fullPath =
- const relativePath =
+ const fullPath = path__namespace.default.join(dir, entry.name);
+ const relativePath = path__namespace.default.relative(baseDir, fullPath);
  if (entry.isDirectory()) {
  const subFiles = await getAllFiles(fullPath, baseDir);
  files.push(...subFiles);
@@ -420,203 +170,525 @@ async function getAllFiles(dir, baseDir = dir) {
  }

  // src/generateOntologyAssets.ts
-
-
-
-
- }
-
-
+ async function generateOntologyAssets(opts) {
+ const {
+ ontologyDir,
+ logger
+ } = opts;
+ await fs__namespace.promises.mkdir(opts.workDir, {
+ recursive: true
+ });
+ if (!fs__namespace.existsSync(ontologyDir)) {
+ fs__namespace.mkdirSync(ontologyDir, {
  recursive: true
  });
- logger.info("Created .ontology directory"
- timestamp: true
- });
+ logger.info("Created .ontology directory");
  }
- await ontologyJsToIr(
- await ontologyIrToFullMetadata(
- await fullMetadataToOsdk(
+ await ontologyJsToIr(opts);
+ await ontologyIrToFullMetadata(opts);
+ await fullMetadataToOsdk(opts);
  }
-
+ function ontologyIrPath(workDir) {
+ return path__namespace.join(workDir, ".ontology.ir.json");
+ }
+ function ontologyFullMetadataPath(workDir) {
+ return path__namespace.join(workDir, ".ontology.json");
+ }
+ async function ontologyJsToIr({
+ logger,
+ ontologyDir,
+ workDir
+ }) {
+ logger.debug("Generating Ontology IR");
  const {
  stdout,
  stderr,
  exitCode
- } = await execa.execa("pnpm", ["exec", "maker", "-i",
+ } = await execa.execa("pnpm", ["exec", "maker", "-i", `${ontologyDir}/ontology.mts`, "-o", ontologyIrPath(workDir)]);
  if (exitCode !== 0) {
- logger.error(`Ontology IR generation failed with exit code ${exitCode}
- timestamp: true
- });
+ logger.error(`Ontology IR generation failed with exit code ${exitCode}`);
  if (stderr) {
- logger.error(`Command stderr: ${stderr}
- timestamp: true
- });
+ logger.error(`Command stderr: ${stderr}`);
  }
  throw new Error(`Failed to generate ontology IR: exit code ${exitCode}`);
  }
+ if (stdout) {
+ logger.debug(`Ontology IR generation output: ${stdout}`);
+ }
  }
- async function ontologyIrToFullMetadata(
+ async function ontologyIrToFullMetadata({
+ logger,
+ workDir
+ }) {
+ logger.debug("Converting IR to Full metadata");
  try {
- const irContent = await
+ const irContent = await fs__namespace.promises.readFile(ontologyIrPath(workDir), {
  encoding: "utf-8"
  });
  const blockData = JSON.parse(irContent).blockData;
  const fullMeta = generatorConverters_ontologyir.OntologyIrToFullMetadataConverter.getFullMetadataFromIr(blockData);
- await
-
+ await fs__namespace.promises.writeFile(ontologyFullMetadataPath(workDir), JSON.stringify(fullMeta, null, 2));
+ logger.debug("Successfully converted IR to full metadata");
  } catch (error) {
- logger.error(`Failed to convert IR to full metadata: ${
- timestamp: true
- });
+ logger.error(`Failed to convert IR to full metadata: ${util.inspect(error)}`);
  throw error;
  }
  }
- async function fullMetadataToOsdk(
-
-
+ async function fullMetadataToOsdk({
+ logger,
+ workDir
+ }) {
+ logger.debug("Generating OSDK from full metadata");
+ const tempDir = path__namespace.join(workDir, ".osdkGenerationTmp", "src");
+ await fs__namespace.promises.rm(tempDir, {
  recursive: true,
  force: true
  });
- await
+ await fs__namespace.promises.mkdir(tempDir, {
  recursive: true
  });
+ await fs__namespace.promises.writeFile(path__namespace.join(tempDir, "..", "package.json"), JSON.stringify({}, null, 2), {
+ encoding: "utf-8"
+ });
  try {
- const tempSrcDir =
+ const tempSrcDir = path__namespace.join(tempDir, "src");
  const {
  stdout,
  stderr,
  exitCode
- } = await execa.execa("pnpm", ["exec", "osdk", "unstable", "typescript", "generate", "--outDir", tempSrcDir, "--ontologyPath",
-
+ } = await execa.execa("pnpm", ["exec", "osdk", "unstable", "typescript", "generate", "--outDir", tempSrcDir, "--ontologyPath", ontologyFullMetadataPath(workDir), "--beta", "true", "--packageType", "module", "--version", "dev"]);
+ logger.debug(`OSDK generation output: ${stdout}`);
  if (stderr) {
- logger.error(`OSDK generation stderr: ${stderr}
- timestamp: true
- });
+ logger.error(`OSDK generation stderr: ${stderr}`);
  }
  if (exitCode === 0) {
  const targetDir = ".osdk/src";
  try {
-
+ logger.debug("OSDK generation successful, synchronizing with target directory");
  await syncDirectories(tempSrcDir, targetDir, logger);
-
- await
+ logger.debug(`Successfully synchronized ${targetDir} with newly generated code`);
+ await compileOsdk(logger);
+ await fs__namespace.promises.rm(tempDir, {
  recursive: true,
  force: true
  });
  } catch (error) {
- logger.error(`Failed to synchronize ${targetDir} directory: ${
-
- });
- logger.error(`Temporary files left at: ${tempDir}`, {
- timestamp: true
- });
+ logger.error(`Failed to synchronize ${targetDir} directory: ${util.inspect(error)}`);
+ logger.error(`Temporary files left at: ${tempDir}`);
  throw error;
  }
  } else {
- logger.error(`OSDK generation failed with exit code ${exitCode}
-
- });
- logger.error(`Temporary files left at: ${tempDir}`, {
- timestamp: true
- });
+ logger.error(`OSDK generation failed with exit code ${exitCode}`);
+ logger.error(`Temporary files left at: ${tempDir}`);
  throw new Error(`OSDK generation failed with exit code ${exitCode}`);
  }
  } catch (error) {
  try {
- await
+ await fs__namespace.promises.rm(tempDir, {
  recursive: true,
  force: true
  });
  } catch (cleanupError) {
- logger.warn(`Failed to clean up temporary directory: ${
- timestamp: true
- });
+ logger.warn(`Failed to clean up temporary directory: ${util.inspect(cleanupError)}`);
  }
  throw error;
  }
  }
- function
-
-
-
-
-
-
- timestamp: true
+ async function compileOsdk(logger) {
+ const {
+ stdout,
+ stderr,
+ exitCode
+ } = await execa.execa("pnpm", ["exec", "tsc"], {
+ cwd: ".osdk"
  });
-
-
-
- });
- logger.info("Created .ontology directory", {
- timestamp: true
- });
+ logger.debug(`OSDK generation output: ${stdout}`);
+ if (stderr) {
+ logger.error(`OSDK generation stderr: ${stderr}`);
  }
-
-
-
-
-
-
-
+ return exitCode;
+ }
+ var Level = /* @__PURE__ */ function(Level2) {
+ Level2[Level2["debug"] = 0] = "debug";
+ Level2[Level2["info"] = 1] = "info";
+ Level2[Level2["warn"] = 2] = "warn";
+ Level2[Level2["error"] = 3] = "error";
+ return Level2;
+ }({});
+ var Logger = class {
+ #level;
+ #viteLogger;
+ constructor({
+ level,
+ viteLogger
+ }) {
+ this.#level = typeof level === "string" ? Level[level] : level;
+ this.#viteLogger = viteLogger;
+ }
+ debug(message) {
+ if (this.#level <= Level.debug) {
+ this.#viteLogger.info(`${chalk__default.default.cyan("[osdk]")} ${chalk__default.default.gray(`DEBUG: ${message}`)}`, {
+ timestamp: true
+ });
  }
-
-
+ }
+ info(message) {
+ if (this.#level <= Level.info) {
+ this.#viteLogger.info(`${chalk__default.default.cyan("[osdk]")} ${message}`, {
  timestamp: true
  });
  }
-
-
-
-
+ }
+ error(message) {
+ if (this.#level <= Level.error) {
+ this.#viteLogger.error(`${chalk__default.default.cyan("[osdk]")} ${chalk__default.default.red(message)}`, {
+ timestamp: true
  });
-
-
-
-
+ }
+ }
+ warn(message) {
+ if (this.#level <= Level.warn) {
+ this.#viteLogger.warn(`${chalk__default.default.cyan("[osdk]")} ${chalk__default.default.yellow(message)}`, {
  timestamp: true
  });
+ }
+ }
+ };
+
+ // src/OacContext.ts
+ var OacContext = class extends OacConfig {
+ constructor(config, resolvedConfig) {
+ super(config);
+ this.serverUrl = `http${resolvedConfig.server.https ? "s" : ""}://localhost:${resolvedConfig.server.port}`;
+ this.defaultOntologyRid = `ri.ontology.main.ontology.00000000-0000-0000-0000-000000000000`;
+ this.workDir = path__namespace.join("node_modules", ".osdk", ".oac");
+ this.logger = new Logger({
+ level: config.loggerLevel ?? Level.info,
+ viteLogger: resolvedConfig.logger
  });
+ this.workDir = path__namespace.join("node_modules", ".osdk", ".oac");
  }
+ fauxFoundryFactory = () => {
+ return new faux.FauxFoundry(this.serverUrl, {
+ apiName: "DefaultOntology",
+ description: "Description",
+ displayName: "Ontology",
+ rid: this.defaultOntologyRid
+ });
+ };
+ };
+ async function applySeed(fauxFoundry, seedPath) {
+ const jiti$1 = jiti.createJiti(undefined, {
+ moduleCache: false,
+ debug: false,
+ importMeta: ({ url: (typeof document === 'undefined' ? require('u' + 'rl').pathToFileURL(__filename).href : (_documentCurrentScript && _documentCurrentScript.tagName.toUpperCase() === 'SCRIPT' && _documentCurrentScript.src || new URL('index.cjs', document.baseURI).href)) })
+ });
+ const module = await jiti$1.import(seedPath);
+ module.default(fauxFoundry);
  }
-
-
-
-
-
+ util.inspect.defaultOptions = {
+ colors: true,
+ depth: Infinity
+ };
+ function registerOntologyFullMetadata(ontology, ontologyFullMetadata) {
+ Object.values(ontologyFullMetadata.objectTypes).forEach((objectType) => {
+ ontology.registerObjectType(objectType);
+ });
+ Object.values(ontologyFullMetadata.actionTypes).forEach((actionType) => {
+ const implementation = createActionImplementation(actionType, ontologyFullMetadata);
+ const actionTypeWithCamelCaseApiName = {
+ ...actionType,
+ apiName: camelcase(actionType.apiName)
+ };
+ ontology.registerActionType(actionTypeWithCamelCaseApiName, implementation);
+ });
+ Object.values(ontologyFullMetadata.sharedPropertyTypes).forEach((actionType) => {
+ ontology.registerSharedPropertyType(actionType);
+ });
+ Object.values(ontologyFullMetadata.queryTypes).forEach((query) => {
+ ontology.registerQueryType(query);
+ });
+ Object.values(ontologyFullMetadata.interfaceTypes).forEach((iface) => {
+ ontology.registerInterfaceType(iface);
+ });
+ }
+ function createActionImplementation(actionType, fullMetadata) {
+ return (batch, payload, _ctx) => {
+ const params = payload.parameters;
+ for (const operation of actionType.operations) {
+ switch (operation.type) {
+ case "createObject": {
+ const objectType = getObjectTypeForOperation(operation, fullMetadata);
+ const primaryKeyProp = objectType.objectType.primaryKey;
+ const primaryKey = extractAndDelete(params, primaryKeyProp);
+ const objectData = paramsToDataValues(params, actionType);
+ batch.addObject(objectType.objectType.apiName, primaryKey, objectData);
+ handleObjectLinks(batch, fullMetadata, objectType.objectType.apiName, primaryKey, params);
+ break;
+ }
+ case "modifyObject": {
+ const {
+ objectType
+ } = getObjectTypeForOperation(operation, fullMetadata);
+ const primaryKey = extractAndDelete(params, "objectToModifyParameter");
+ const targetObject = batch.getObject(objectType.apiName, primaryKey);
+ !targetObject ? process.env.NODE_ENV !== "production" ? invariant__default.default(false, `Could not find object ${objectType.apiName} with PK ${primaryKey}`) : invariant__default.default(false) : void 0;
+ if (objectType.primaryKey in params) {
+ !(params[objectType.primaryKey] === primaryKey) ? process.env.NODE_ENV !== "production" ? invariant__default.default(false, `If the primary key is provided, it must match the 'objectToModifyParameter'`) : invariant__default.default(false) : void 0;
+ delete params[objectType.primaryKey];
+ }
+ const objectData = paramsToDataValues(params, actionType);
+ batch.modifyObject(objectType.apiName, primaryKey, objectData);
+ handleObjectLinks(batch, fullMetadata, objectType.apiName, primaryKey, params);
+ break;
+ }
+ case "deleteObject": {
+ const {
+ objectType
+ } = getObjectTypeForOperation(operation, fullMetadata);
+ const primaryKey = extractAndDelete(params, "objectToDeleteParameter");
+ batch.deleteObject(objectType.apiName, primaryKey);
+ break;
+ }
+ case "createLink": {
+ const aSideObjectTypeApiName = operation.aSideObjectTypeApiName;
+ const bSideObjectTypeApiName = operation.bSideObjectTypeApiName;
+ const linkTypeApiNameAtoB = operation.linkTypeApiNameAtoB;
+ const aSidePrimaryKey = params.aSidePrimaryKey || params.primaryKey_;
+ const bSidePrimaryKey = params.bSidePrimaryKey || params.linkedObjectPrimaryKey;
+ if (aSidePrimaryKey && bSidePrimaryKey) {
+ batch.addLink(aSideObjectTypeApiName, aSidePrimaryKey, linkTypeApiNameAtoB, bSideObjectTypeApiName, bSidePrimaryKey);
+ }
+ break;
+ }
+ case "deleteLink": {
+ const aSideObjectTypeApiName = operation.aSideObjectTypeApiName;
+ const bSideObjectTypeApiName = operation.bSideObjectTypeApiName;
+ const linkTypeApiNameAtoB = operation.linkTypeApiNameAtoB;
+ const aSidePrimaryKey = params.aSidePrimaryKey || params.primaryKey_;
+ const bSidePrimaryKey = params.bSidePrimaryKey || params.linkedObjectPrimaryKey;
+ if (aSidePrimaryKey && bSidePrimaryKey) {
+ batch.removeLink(aSideObjectTypeApiName, aSidePrimaryKey, linkTypeApiNameAtoB, bSideObjectTypeApiName, bSidePrimaryKey);
+ }
+ break;
+ }
+ // Handle other operation types as needed
+ case "createInterfaceObject":
+ case "modifyInterfaceObject":
+ case "deleteInterfaceObject":
+ throw new Error(`Operation type ${operation.type} not implemented yet`);
+ default:
+ throw new Error(`Unknown operation type: ${operation.type}`);
+ }
+ }
+ };
+ }
+ function extractAndDelete(obj, key) {
+ const value = obj[key];
+ delete obj[key];
+ return value;
+ }
+ function getObjectTypeForOperation(operation, fullMetadata) {
+ const objectTypeApiName = operation.objectTypeApiName;
+ const objectType = fullMetadata.objectTypes[objectTypeApiName];
+ !objectType ? process.env.NODE_ENV !== "production" ? invariant__default.default(false) : invariant__default.default(false) : void 0;
+ return objectType;
+ }
+ function paramsToDataValues(params, actionType) {
+ const objectData = {};
+ for (const [key, value] of Object.entries(params)) {
+ objectData[key] = toDataValue(value, actionType.parameters[key]);
+ }
+ return objectData;
+ }
+ function camelcase(apiName) {
+ return apiName.toLowerCase().replace(/[-_]+(.)?/g, (_, chr) => chr ? chr.toUpperCase() : "");
+ }
+ function toDataValue(value, param) {
+ if (param.dataType.type === "geohash" && typeof value === "string") {
+ return latLonStringToGeoJSON(value);
+ }
+ return value;
+ }
+ function latLonStringToGeoJSON(latLonStr) {
+ const [lat, lon] = latLonStr.split(",").map(Number);
+ if (isNaN(lat) || isNaN(lon) || lat < -90 || lat > 90 || lon < -180 || lon > 180) {
+ throw new Error("Invalid latitude or longitude");
+ }
  return {
-
+ type: "Point",
+ coordinates: [lon, lat]
+ // GeoJSON uses [longitude, latitude]
+ };
+ }
+ function handleObjectLinks(batch, fullMetadata, objectTypeApiName, primaryKey, params) {
+ fullMetadata.objectTypes[objectTypeApiName].linkTypes.forEach((link) => {
+ const cardinality = link.cardinality;
+ if (cardinality === "ONE") {
+ for (const foreignObject of batch.getObjects(link.objectTypeApiName)) {
+ if (anyValueMatches(params, foreignObject.__primaryKey)) {
+ batch.addLink(objectTypeApiName, primaryKey, link.apiName, link.objectTypeApiName, foreignObject.__primaryKey);
+ }
+ }
+ } else {
+ for (const foreignObject of batch.getObjects(link.objectTypeApiName)) {
+ if (anyValueMatches(foreignObject, primaryKey)) {
+ batch.addLink(objectTypeApiName, primaryKey, link.apiName, link.objectTypeApiName, foreignObject.__primaryKey);
+ }
+ }
+ }
+ });
+ }
+ function anyValueMatches(obj, primaryKey) {
+ return Object.values(obj).some((val) => val === primaryKey);
+ }
+ function readJsonFile(arg0) {
+ const content = fs__namespace.readFileSync(arg0, "utf-8");
+ return JSON.parse(content);
+ }
+
+ // src/applyOntologyAndSeed.ts
+ async function applyOntologyAndSeed(fauxFoundry, ctx) {
+ const ontology = fauxFoundry.getDefaultOntology();
+ const ontologyFullMetadata = readJsonFile(ontologyFullMetadataPath(ctx.workDir));
+ registerOntologyFullMetadata(ontology, ontologyFullMetadata);
+ try {
+ if (ctx.hooks?.preSeed) {
+ ctx.logger.debug("calling preSeed hook");
+ }
+ await ctx.hooks?.preSeed?.(ontology);
+ } catch (e) {
+ ctx.logger.error(`Unhandled error from preSeed hook. Ignoring and continuing. ${util.inspect(e)}`);
+ }
+ ctx.logger.debug("applying seed data");
+ await applySeed(fauxFoundry, path__namespace.resolve(ctx.ontologyDir, "seed.ts"));
+ }
+ var OacServerContext = class extends OacContext {
+ constructor(config, server) {
+ super(config, server.config);
+ this.watcher = server.watcher;
+ this.emitter = new Emittery__default.default();
+ }
+ };
+ async function routeConnectToMsw(baseUrl, handlers, emitter, req, res, next) {
+ const method = req.method ?? "GET";
+ const canRequestHaveBody = method !== "HEAD" && method !== "GET";
+ const mockRequest = new Request(new URL(req.url, baseUrl), {
+ method,
+ headers: new Headers(req.headers),
+ credentials: "omit",
+ // @ts-expect-error Internal Undici property.
+ duplex: canRequestHaveBody ? "half" : void 0,
+ body: canRequestHaveBody ? stream.Readable.toWeb(req) : void 0
+ });
+ await faux.msw.handleRequest(mockRequest, crypto.randomUUID(), handlers, {
+ onUnhandledRequest: "bypass"
+ }, emitter, {
+ resolutionContext: {
+ baseUrl
+ },
  // eslint-disable-next-line @typescript-eslint/require-await
- async
-
+ async onMockedResponse(mockedResponse) {
+ const {
+ status,
+ statusText,
+ headers
+ } = mockedResponse;
+ res.statusCode = status;
+ res.statusMessage = statusText;
+ headers.forEach((value, name) => {
+ res.appendHeader(name, value);
+ });
+ if (mockedResponse.body) {
+ const stream$1 = stream.Readable.fromWeb(mockedResponse.body);
+ stream$1.pipe(res);
+ } else {
+ res.end();
+ }
  },
-
-
-
-
-
+ onPassthroughResponse() {
+ next();
+ }
+ });
+ }
+
+ // src/OacDevServer.ts
+ var OacDevServer = class extends OacServerContext {
+ constructor(config, server) {
+ super(config, server);
+ this.foundry = this.fauxFoundryFactory();
+ this.emitter.on("generatedOntologyAssets", async () => {
+ this.foundry = this.fauxFoundryFactory();
+ await applyOntologyAndSeed(this.foundry, this);
+ server.hot.send({
+ type: "full-reload"
+ });
+ });
+ server.middlewares.use(this.#connectMiddleware);
+ }
+ #connectMiddleware = async (req, res, next) => {
+ const mswEmitter = new EventEmitter__default.default();
+ return void await routeConnectToMsw(this.serverUrl, this.foundry.handlers, mswEmitter, req, res, next);
+ };
+ watchOntologyAsCode = async () => {
+ const {
+ ontologyDir,
+ watcher
+ } = this;
+ this.logger.info(`Starting OAC file watcher for ${ontologyDir}`);
+ if (!fs__namespace.existsSync(ontologyDir)) {
+ fs__namespace.mkdirSync(ontologyDir, {
+ recursive: true
+ });
+ }
+ watcher.add(ontologyDir);
+ watcher.on("add", this.#handleOacFileChanged).on("change", this.#handleOacFileChanged).on("unlink", this.#handleOacFileChanged);
+ await this.#handleOacFileChanged(void 0);
+ return;
+ };
+ #handleOacFileChanged = async (filePath) => {
+ if (filePath && !filePath.startsWith(`${this.ontologyDir}/`)) {
+ return;
+ }
+ if (filePath) {
+ this.logger.info(`File ${filePath} changed.`);
+ }
+ try {
+ await fs__namespace.promises.mkdir(this.workDir, {
+ recursive: true
  });
-
-
-
+ await generateOntologyAssets(this);
+ await this.emitter.emit("generatedOntologyAssets");
+ } catch (error) {
+ this.logger.error(`Error generating ontology assets: ${util__namespace.inspect(error)}`);
+ }
+ };
+ };
+
+ // src/ontologyAsCode.ts
+ function ontologyAsCode(oacConfig) {
+ let viteConfig;
+ return {
+ name: "oac-vite-plugin",
+ configResolved(resolvedConfig) {
+ viteConfig = resolvedConfig;
+ },
+ async configureServer(server) {
+ const oacDevServer = new OacDevServer(oacConfig, server);
+ await oacDevServer.watchOntologyAsCode();
  },
  async buildStart() {
-
-
-
- });
+ const ctx = new OacContext(oacConfig, viteConfig);
+ if (viteConfig.command === "build") {
+ ctx.logger.info("Generating ontology assets for build...");
  try {
- await
-
- ontologyDir
- });
- config.logger.info("Successfully generated ontology assets for build", {
- timestamp: true
+ await fs__namespace.promises.mkdir(ctx.workDir, {
+ recursive: true
  });
+ await generateOntologyAssets(ctx);
+ ctx.logger.info("Successfully generated ontology assets for build");
  } catch (error) {
-
- timestamp: true
- });
+ ctx.logger.error(`Failed to generate ontology assets: ${error instanceof Error ? error.message : String(error)}`);
  throw error;
  }
  }
@@ -624,6 +696,7 @@ function ontologyAsCode(opts) {
  };
  }

+ exports.OacConfig = OacConfig;
  exports.ontologyAsCode = ontologyAsCode;
  //# sourceMappingURL=index.cjs.map
  //# sourceMappingURL=index.cjs.map
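For orientation, the bundle above still exports `ontologyAsCode` (and now also `OacConfig`). A minimal sketch of wiring the plugin into a Vite config follows; the `ontologyDir` and `loggerLevel` option names are inferred from the new `OacConfig`/`OacContext` code in this diff, and the `.ontology` directory value is an assumption based on the directory the plugin creates, so the actual public API may differ.

```ts
// vite.config.ts — illustrative sketch only; option names inferred from the diff above.
import { defineConfig } from "vite";
import { ontologyAsCode } from "@osdk/vite-plugin-oac";

export default defineConfig({
  plugins: [
    ontologyAsCode({
      // Directory watched for ontology.mts / seed.ts changes (assumed value).
      ontologyDir: ".ontology",
      // One of "debug" | "info" | "warn" | "error"; maps onto the Level enum in the bundle.
      loggerLevel: "info",
    }),
  ],
});
```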