@osdk/vite-plugin-oac 0.1.0-beta.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +146 -0
- package/build/browser/FauxFoundryTypes.js +2 -0
- package/build/browser/FauxFoundryTypes.js.map +1 -0
- package/build/browser/FoundryMiddlewareController.js +88 -0
- package/build/browser/FoundryMiddlewareController.js.map +1 -0
- package/build/browser/applySeed.js +27 -0
- package/build/browser/applySeed.js.map +1 -0
- package/build/browser/generateOntologyAssets.js +205 -0
- package/build/browser/generateOntologyAssets.js.map +1 -0
- package/build/browser/index.js +69 -0
- package/build/browser/index.js.map +1 -0
- package/build/browser/registerOntologyFullMetadata.js +194 -0
- package/build/browser/registerOntologyFullMetadata.js.map +1 -0
- package/build/browser/routeConnectToMsw.js +61 -0
- package/build/browser/routeConnectToMsw.js.map +1 -0
- package/build/browser/syncDirectories.js +204 -0
- package/build/browser/syncDirectories.js.map +1 -0
- package/build/browser/utils/readJsonFile.js +22 -0
- package/build/browser/utils/readJsonFile.js.map +1 -0
- package/build/browser/watchOntologyAsCode.js +69 -0
- package/build/browser/watchOntologyAsCode.js.map +1 -0
- package/build/cjs/index.cjs +629 -0
- package/build/cjs/index.cjs.map +1 -0
- package/build/cjs/index.d.cts +14 -0
- package/build/esm/FauxFoundryTypes.js +2 -0
- package/build/esm/FauxFoundryTypes.js.map +1 -0
- package/build/esm/FoundryMiddlewareController.js +88 -0
- package/build/esm/FoundryMiddlewareController.js.map +1 -0
- package/build/esm/applySeed.js +27 -0
- package/build/esm/applySeed.js.map +1 -0
- package/build/esm/generateOntologyAssets.js +205 -0
- package/build/esm/generateOntologyAssets.js.map +1 -0
- package/build/esm/index.js +69 -0
- package/build/esm/index.js.map +1 -0
- package/build/esm/registerOntologyFullMetadata.js +194 -0
- package/build/esm/registerOntologyFullMetadata.js.map +1 -0
- package/build/esm/routeConnectToMsw.js +61 -0
- package/build/esm/routeConnectToMsw.js.map +1 -0
- package/build/esm/syncDirectories.js +204 -0
- package/build/esm/syncDirectories.js.map +1 -0
- package/build/esm/utils/readJsonFile.js +22 -0
- package/build/esm/utils/readJsonFile.js.map +1 -0
- package/build/esm/watchOntologyAsCode.js +69 -0
- package/build/esm/watchOntologyAsCode.js.map +1 -0
- package/build/types/FauxFoundryTypes.d.ts +2 -0
- package/build/types/FauxFoundryTypes.d.ts.map +1 -0
- package/build/types/FoundryMiddlewareController.d.ts +14 -0
- package/build/types/FoundryMiddlewareController.d.ts.map +1 -0
- package/build/types/applySeed.d.ts +2 -0
- package/build/types/applySeed.d.ts.map +1 -0
- package/build/types/generateOntologyAssets.d.ts +12 -0
- package/build/types/generateOntologyAssets.d.ts.map +1 -0
- package/build/types/index.d.ts +11 -0
- package/build/types/index.d.ts.map +1 -0
- package/build/types/registerOntologyFullMetadata.d.ts +3 -0
- package/build/types/registerOntologyFullMetadata.d.ts.map +1 -0
- package/build/types/routeConnectToMsw.d.ts +6 -0
- package/build/types/routeConnectToMsw.d.ts.map +1 -0
- package/build/types/syncDirectories.d.ts +5 -0
- package/build/types/syncDirectories.d.ts.map +1 -0
- package/build/types/utils/readJsonFile.d.ts +1 -0
- package/build/types/utils/readJsonFile.d.ts.map +1 -0
- package/build/types/watchOntologyAsCode.d.ts +10 -0
- package/build/types/watchOntologyAsCode.d.ts.map +1 -0
- package/package.json +81 -0
|
@@ -0,0 +1,629 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
var path2 = require('path');
|
|
4
|
+
var faux = require('@osdk/faux');
|
|
5
|
+
var EventEmitter = require('events');
|
|
6
|
+
var jiti = require('jiti');
|
|
7
|
+
var stream = require('stream');
|
|
8
|
+
var fs2 = require('fs');
|
|
9
|
+
var generatorConverters_ontologyir = require('@osdk/generator-converters.ontologyir');
|
|
10
|
+
var execa = require('execa');
|
|
11
|
+
var crypto2 = require('crypto');
|
|
12
|
+
|
|
13
|
+
// Capture the <script> element executing this bundle when loaded in a browser
// (used below to reconstruct import.meta.url for the CJS build); null in Node.
var _documentCurrentScript = typeof document !== 'undefined' ? document.currentScript : null;
|
|
14
|
+
// Adapt a CommonJS export for default-import consumption: genuine ES modules
// pass through untouched, anything else is wrapped under a `default` key.
function _interopDefault (e) {
  if (e && e.__esModule) {
    return e;
  }
  return { default: e };
}
|
|
15
|
+
|
|
16
|
+
// Build a frozen ES-module-style namespace object for a CommonJS module.
// Real ES modules are returned as-is; otherwise every own key except
// `default` is re-exposed as a live getter and the original module object
// is attached as `default`.
function _interopNamespace(e) {
  if (e && e.__esModule) return e;
  const n = Object.create(null);
  if (e) {
    for (const k of Object.keys(e)) {
      if (k === 'default') continue;
      const d = Object.getOwnPropertyDescriptor(e, k);
      Object.defineProperty(n, k, d.get ? d : {
        enumerable: true,
        get: function () { return e[k]; }
      });
    }
  }
  n.default = e;
  return Object.freeze(n);
}
|
|
33
|
+
|
|
34
|
+
// Interop wrappers so the CJS build consumes these core modules the same way
// the ESM build does (default-style vs. namespace-style imports).
var path2__default = /*#__PURE__*/_interopDefault(path2);
var EventEmitter__default = /*#__PURE__*/_interopDefault(EventEmitter);
var fs2__namespace = /*#__PURE__*/_interopNamespace(fs2);
var crypto2__default = /*#__PURE__*/_interopDefault(crypto2);
|
|
38
|
+
|
|
39
|
+
// src/index.ts
|
|
40
|
+
/**
 * Load a seed module (TypeScript or JavaScript) via jiti and invoke its
 * default export with the given FauxFoundry instance so it can populate data.
 *
 * @param {object} fauxFoundry - FauxFoundry instance to seed.
 * @param {string} seedPath - Path to the seed module (e.g. `.ontology/seed.ts`).
 * @throws {Error} If the seed module has no callable default export.
 */
async function applySeed(fauxFoundry, seedPath) {
  const jiti$1 = jiti.createJiti(undefined, {
    moduleCache: false,
    debug: false,
    importMeta: ({ url: (typeof document === 'undefined' ? require('u' + 'rl').pathToFileURL(__filename).href : (_documentCurrentScript && _documentCurrentScript.tagName.toUpperCase() === 'SCRIPT' && _documentCurrentScript.src || new URL('index.cjs', document.baseURI).href)) })
  });
  // Renamed from `module` to avoid shadowing the CommonJS module binding.
  const seedModule = await jiti$1.import(seedPath);
  if (typeof seedModule.default !== "function") {
    throw new Error(`Seed module at ${seedPath} must have a function default export`);
  }
  // Await the result: previously an async seed's promise was left floating,
  // so callers could proceed before seeding finished.
  await seedModule.default(fauxFoundry);
}
|
|
49
|
+
|
|
50
|
+
// src/registerOntologyFullMetadata.ts
|
|
51
|
+
/**
 * Register every entity from an OntologyFullMetadata document onto a faux
 * ontology: object types, action types (with a generated implementation and a
 * camelCased apiName), shared property types, query types and interface types.
 *
 * Sections missing from the metadata are skipped instead of throwing (the
 * original crashed on `Object.values(undefined)` for any absent section).
 *
 * @param {object} ontology - Faux ontology exposing the register* methods.
 * @param {object} ontologyFullMetadata - Parsed `.ontology.json` contents.
 */
function registerOntologyFullMetadata(ontology, ontologyFullMetadata) {
  Object.values(ontologyFullMetadata.objectTypes ?? {}).forEach((objectType) => {
    ontology.registerObjectType(objectType);
  });
  Object.values(ontologyFullMetadata.actionTypes ?? {}).forEach((actionType) => {
    const implementation = createActionImplementation(actionType);
    // OSDK clients address actions by camelCase apiName while the IR uses
    // separator-delimited names, so normalize before registering.
    const actionTypeWithCamelCaseApiName = {
      ...actionType,
      apiName: camelcase(actionType.apiName)
    };
    ontology.registerActionType(actionTypeWithCamelCaseApiName, implementation);
  });
  // Fixed misleading callback name: these are shared property types.
  Object.values(ontologyFullMetadata.sharedPropertyTypes ?? {}).forEach((sharedPropertyType) => {
    ontology.registerSharedPropertyType(sharedPropertyType);
  });
  Object.values(ontologyFullMetadata.queryTypes ?? {}).forEach((query) => {
    ontology.registerQueryType(query);
  });
  Object.values(ontologyFullMetadata.interfaceTypes ?? {}).forEach((iface) => {
    ontology.registerInterfaceType(iface);
  });
}
|
|
73
|
+
/**
 * Build the server-side implementation for an action type: a function that
 * applies each of the action's operations (create/modify/delete object,
 * create/delete link) to a batch using the submitted parameters.
 *
 * The original duplicated the 20-line object-resolution logic verbatim for
 * modifyObject and deleteObject, and the link-key fallback for both link
 * operations; those are extracted into helpers with identical behavior.
 *
 * @param {object} actionType - Action type definition (operations + parameters).
 * @returns {Function} `(batch, payload, _ctx) => void` invoked per action call.
 * @throws {Error} For interface operations (not implemented) and unknown types.
 */
function createActionImplementation(actionType) {
  // Resolve which object an operation targets. `operation.objectTypeApiName`
  // is either the literal type name (string), or a reference to a parameter
  // holding an object whose own apiName / primary key then take precedence.
  function resolveObjectTarget(operation, params) {
    if (typeof operation.objectTypeApiName === "string") {
      return { objectTypeApiName: operation.objectTypeApiName, primaryKey: params.primaryKey_ };
    }
    const referenced = params[operation.objectTypeApiName];
    if (referenced) {
      return {
        objectTypeApiName: referenced.objectTypeApiName || referenced,
        primaryKey: referenced.primaryKeyValue || params.primaryKey_
      };
    }
    return { objectTypeApiName: operation.objectTypeApiName, primaryKey: params.primaryKey_ };
  }
  // Collect action parameters into object property data, converting each value
  // per its declared parameter type. `exclude` lists control parameters that
  // are not object properties.
  function collectObjectData(params, exclude) {
    const objectData = {};
    for (const [key, value] of Object.entries(params)) {
      if (!exclude.includes(key)) {
        objectData[key] = toDataValue(value, actionType.parameters[key]);
      }
    }
    return objectData;
  }
  // Resolve both sides of a link operation; either side may fall back to the
  // generic primary-key parameters.
  function resolveLinkKeys(params) {
    return {
      aSidePrimaryKey: params.aSidePrimaryKey || params.primaryKey_,
      bSidePrimaryKey: params.bSidePrimaryKey || params.linkedObjectPrimaryKey
    };
  }
  return (batch, payload, _ctx) => {
    const params = payload.parameters;
    for (const operation of actionType.operations) {
      switch (operation.type) {
        case "createObject": {
          batch.addObject(
            operation.objectTypeApiName,
            params.primaryKey_,
            collectObjectData(params, ["primaryKey_"])
          );
          break;
        }
        case "modifyObject": {
          const { objectTypeApiName, primaryKey } = resolveObjectTarget(operation, params);
          batch.modifyObject(
            objectTypeApiName,
            primaryKey,
            collectObjectData(params, ["primaryKey_", "objectToModifyParameter"])
          );
          break;
        }
        case "deleteObject": {
          const { objectTypeApiName, primaryKey } = resolveObjectTarget(operation, params);
          batch.deleteObject(objectTypeApiName, primaryKey);
          break;
        }
        case "createLink": {
          const { aSidePrimaryKey, bSidePrimaryKey } = resolveLinkKeys(params);
          // Only create the link when both endpoints were supplied.
          if (aSidePrimaryKey && bSidePrimaryKey) {
            batch.addLink(operation.aSideObjectTypeApiName, aSidePrimaryKey, operation.linkTypeApiNameAtoB, operation.bSideObjectTypeApiName, bSidePrimaryKey);
          }
          break;
        }
        case "deleteLink": {
          const { aSidePrimaryKey, bSidePrimaryKey } = resolveLinkKeys(params);
          if (aSidePrimaryKey && bSidePrimaryKey) {
            batch.removeLink(operation.aSideObjectTypeApiName, aSidePrimaryKey, operation.linkTypeApiNameAtoB, operation.bSideObjectTypeApiName, bSidePrimaryKey);
          }
          break;
        }
        // Interface object operations are not supported by the faux runtime yet.
        case "createInterfaceObject":
        case "modifyInterfaceObject":
        case "deleteInterfaceObject":
          throw new Error(`Operation type ${operation.type} not implemented yet`);
        default:
          throw new Error(`Unknown operation type: ${operation.type}`);
      }
    }
  };
}
|
|
169
|
+
// Normalize an apiName to camelCase: lower-case everything, then upper-case
// the character following each run of `-`/`_` separators (the separators are
// dropped; a trailing separator simply disappears).
function camelcase(apiName) {
  const lowered = apiName.toLowerCase();
  return lowered.replace(/[-_]+(.)?/g, (_match, chr) => (chr ? chr.toUpperCase() : ""));
}
|
|
172
|
+
// Convert a raw action-parameter value into the form stored on an object.
// Geoshape parameters submitted as "lat,lon" strings are parsed into GeoJSON
// points; everything else passes through unchanged.
function toDataValue(value, param) {
  const isGeoshapeString = param.dataType.type === "geoshape" && typeof value === "string";
  return isGeoshapeString ? latLonStringToGeoJSON(value) : value;
}
|
|
178
|
+
/**
 * Parse a "lat,lon" string into a GeoJSON Point.
 *
 * @param {string} latLonStr - Comma-separated latitude,longitude.
 * @returns {{type: "Point", coordinates: number[]}} GeoJSON point
 *   (coordinates are [longitude, latitude] per the GeoJSON spec).
 * @throws {Error} If the string does not contain exactly two numeric
 *   components, or either component is out of range.
 */
function latLonStringToGeoJSON(latLonStr) {
  const parts = latLonStr.split(",");
  // Reject malformed input explicitly: the original accepted "," as (0, 0)
  // because Number("") === 0, and silently ignored extra components.
  if (parts.length !== 2 || parts.some((p) => p.trim() === "")) {
    throw new Error("Invalid latitude or longitude");
  }
  const [lat, lon] = parts.map(Number);
  if (Number.isNaN(lat) || Number.isNaN(lon) || lat < -90 || lat > 90 || lon < -180 || lon > 180) {
    throw new Error("Invalid latitude or longitude");
  }
  return {
    type: "Point",
    coordinates: [lon, lat] // GeoJSON uses [longitude, latitude]
  };
}
|
|
189
|
+
/**
 * Bridge a Connect-style (req, res, next) request into MSW request handlers
 * so the faux Foundry can answer it. Handled requests are streamed back onto
 * `res`; unhandled requests fall through to `next()`.
 *
 * @param {string} baseUrl - Base URL used to resolve the incoming req.url.
 * @param {Array} handlers - MSW request handlers from FauxFoundry.
 * @param {EventEmitter} emitter - MSW life-cycle emitter.
 * @param req - Node IncomingMessage.
 * @param res - Node ServerResponse.
 * @param {Function} next - Connect continuation for unhandled requests.
 */
async function routeConnectToMsw(baseUrl, handlers, emitter, req, res, next) {
  const method = req.method ?? "GET";
  // GET/HEAD requests must not carry a body (fetch rejects them otherwise).
  const canRequestHaveBody = method !== "HEAD" && method !== "GET";
  const mockRequest = new Request(new URL(req.url, baseUrl), {
    method,
    headers: new Headers(req.headers),
    credentials: "omit",
    // @ts-expect-error Internal Undici property.
    duplex: canRequestHaveBody ? "half" : void 0,
    body: canRequestHaveBody ? stream.Readable.toWeb(req) : void 0
  });
  // Fixed: use the required `crypto` module instead of the `crypto` global,
  // which is only available as a global on newer Node.js versions.
  await faux.msw.handleRequest(mockRequest, crypto2__default.default.randomUUID(), handlers, {
    onUnhandledRequest: "bypass"
  }, emitter, {
    resolutionContext: {
      baseUrl
    },
    // eslint-disable-next-line @typescript-eslint/require-await
    async onMockedResponse(mockedResponse) {
      const {
        status,
        statusText,
        headers
      } = mockedResponse;
      // Mirror the mocked response onto the Node response object.
      res.statusCode = status;
      res.statusMessage = statusText;
      headers.forEach((value, name) => {
        res.appendHeader(name, value);
      });
      if (mockedResponse.body) {
        const stream$1 = stream.Readable.fromWeb(mockedResponse.body);
        stream$1.pipe(res);
      } else {
        res.end();
      }
    },
    onPassthroughResponse() {
      next();
    }
  });
}
|
|
230
|
+
// Synchronously read a file from disk and parse its contents as JSON.
function readJsonFile(arg0) {
  const raw = fs2__namespace.readFileSync(arg0, "utf-8");
  return JSON.parse(raw);
}
|
|
234
|
+
|
|
235
|
+
// src/FoundryMiddlewareController.ts
|
|
236
|
+
// Owns the FauxFoundry instance backing the dev-server middleware and
// rebuilds it whenever the ontology-as-code watcher reports regenerated
// assets (via the oacEmitter's "generated" event).
var FoundryMiddlewareController = class {
  #fauxFoundry;
  #serverUrl;
  #defaultOntologyRid;
  // Life-cycle emitter passed through to MSW's handleRequest.
  mswEmitter = new EventEmitter__default.default();
  #hooks;
  constructor(serverUrl, defaultOntologyRid, oacEmitter, hooks) {
    this.#serverUrl = serverUrl;
    this.#defaultOntologyRid = defaultOntologyRid;
    this.#hooks = hooks;
    this.#fauxFoundry = this.#createNewFauxFoundry();
    oacEmitter.on("generated", () => {
      this.#reloadOntologyDefinition().catch((e) => {
        console.error("[oac] error reloading ontology definition", e);
      });
    });
  }
  // All three loggers prepend "[oac]" themselves, so messages passed to them
  // must NOT repeat the prefix (previously some did, logging "[oac] [oac] …").
  #debugLog(message, ...optionalParams) {
    console.log("[oac]", message, ...optionalParams);
  }
  #infoLog(message, ...optionalParams) {
    console.log("[oac]", message, ...optionalParams);
  }
  #errorLog(message, ...optionalParams) {
    console.error("[oac]", message, ...optionalParams);
  }
  // Fresh FauxFoundry exposing a single default ontology.
  #createNewFauxFoundry() {
    return new faux.FauxFoundry(this.#serverUrl, {
      apiName: "DefaultOntology",
      description: "Description",
      displayName: "Ontology",
      rid: this.#defaultOntologyRid
    });
  }
  // Rebuild the faux Foundry from the freshly generated `.ontology.json`,
  // run the optional preSeed hook, then apply the project's seed script.
  async #reloadOntologyDefinition() {
    const fauxFoundry = this.#fauxFoundry = this.#createNewFauxFoundry();
    const ontology = fauxFoundry.getDefaultOntology();
    const ontologyFullMetadata = readJsonFile(".ontology.json");
    registerOntologyFullMetadata(ontology, ontologyFullMetadata);
    try {
      if (this.#hooks?.preSeed) {
        this.#debugLog("calling preSeed hook");
      }
      await this.#hooks?.preSeed?.(ontology);
    } catch (e) {
      // User hook failures must not block the reload.
      this.#errorLog("Unhandled error from preSeed hook. Ignoring and continuing.", e);
    }
    this.#debugLog("applying seed data");
    // Resolve the seed file against the CWD, matching how `.ontology.json`
    // is read above. The previous build passed `undefined` as the first
    // argument to path.resolve, which throws a TypeError and aborted every
    // reload before seeding could run.
    await applySeed(this.#fauxFoundry, path2__default.default.resolve(".ontology", "seed.ts"));
    this.#infoLog("Finished reloading ontology & seed data");
  }
  // Connect-style middleware: route every request through the faux Foundry's
  // MSW handlers; unmatched requests fall through to the next middleware.
  middleware = async (req, res, next) => {
    return void await routeConnectToMsw(this.#serverUrl, this.#fauxFoundry.handlers, this.mswEmitter, req, res, next);
  };
};
|
|
291
|
+
/**
 * One-way sync of sourceDir into targetDir: copy new files, overwrite changed
 * files (content compared by sha256), delete files that no longer exist in
 * the source, then prune empty directories. Per-file failures are logged and
 * collected rather than aborting the whole sync.
 *
 * @param {string} sourceDir - Directory to copy from.
 * @param {string} targetDir - Directory to mirror into (created if missing).
 * @param {object} logger - Vite-style logger (info/warn/error).
 */
async function syncDirectories(sourceDir, targetDir, logger) {
  await fs2__namespace.default.promises.mkdir(targetDir, {
    recursive: true
  });
  const [sourceFiles, targetFiles] = await Promise.all([getAllFiles(sourceDir), getAllFiles(targetDir)]);
  // Sets make the membership checks below O(1); the original used
  // Array.includes inside both loops, which is O(n^2) over the file count.
  const sourceSet = new Set(sourceFiles);
  const targetSet = new Set(targetFiles);
  let updatedCount = 0;
  let addedCount = 0;
  let removedCount = 0;
  let unchangedCount = 0;
  const errors = [];
  for (const relativeFile of sourceFiles) {
    const sourceFile = path2__default.default.join(sourceDir, relativeFile);
    const targetFile = path2__default.default.join(targetDir, relativeFile);
    try {
      const targetExists = targetSet.has(relativeFile);
      const isDifferent = await areFilesDifferent(sourceFile, targetFile);
      if (!targetExists) {
        await fs2__namespace.default.promises.mkdir(path2__default.default.dirname(targetFile), {
          recursive: true
        });
        await fs2__namespace.default.promises.copyFile(sourceFile, targetFile);
        addedCount++;
        logger.info(`Added: ${relativeFile}`, {
          timestamp: true
        });
      } else if (isDifferent) {
        await fs2__namespace.default.promises.copyFile(sourceFile, targetFile);
        updatedCount++;
        logger.info(`Updated: ${targetFile}`, {
          timestamp: true
        });
      } else {
        unchangedCount++;
      }
    } catch (error) {
      const errorMsg = `Failed to sync ${relativeFile}: ${error instanceof Error ? error.message : String(error)}`;
      errors.push(errorMsg);
      logger.error(errorMsg, {
        timestamp: true
      });
    }
  }
  // Remove files present in the target but no longer in the source.
  for (const relativeFile of targetFiles) {
    if (!sourceSet.has(relativeFile)) {
      const targetFile = path2__default.default.join(targetDir, relativeFile);
      try {
        await fs2__namespace.default.promises.unlink(targetFile);
        removedCount++;
        logger.info(`Removed: ${relativeFile}`, {
          timestamp: true
        });
      } catch (error) {
        const errorMsg = `Failed to remove ${relativeFile}: ${error instanceof Error ? error.message : String(error)}`;
        errors.push(errorMsg);
        logger.error(errorMsg, {
          timestamp: true
        });
      }
    }
  }
  // Cleanup of now-empty directories is best-effort only.
  try {
    await removeEmptyDirectories(targetDir);
  } catch (error) {
    logger.warn(`Failed to clean up empty directories: ${error instanceof Error ? error.message : String(error)}`, {
      timestamp: true
    });
  }
  if (errors.length > 0) {
    logger.warn(`Encountered ${errors.length} errors during sync`, {
      timestamp: true
    });
  }
}
|
|
364
|
+
/**
 * Recursively delete directories under `dir` that end up empty. The directory
 * `preserveDir` itself is never removed even when empty (generalized from the
 * previously hard-coded ".osdk/src", which remains the default). All
 * filesystem errors are intentionally swallowed: this is best-effort cleanup.
 *
 * @param {string} dir - Directory tree to prune.
 * @param {string} [preserveDir=".osdk/src"] - Directory that must survive even if empty.
 */
async function removeEmptyDirectories(dir, preserveDir = ".osdk/src") {
  try {
    const entries = await fs2__namespace.default.promises.readdir(dir, {
      withFileTypes: true
    });
    // Depth-first so children are pruned before the parent is re-checked.
    for (const entry of entries) {
      if (entry.isDirectory()) {
        const subdir = path2__default.default.join(dir, entry.name);
        await removeEmptyDirectories(subdir, preserveDir);
      }
    }
    const remainingEntries = await fs2__namespace.default.promises.readdir(dir);
    if (remainingEntries.length === 0) {
      if (path2__default.default.resolve(dir) !== path2__default.default.resolve(preserveDir)) {
        await fs2__namespace.default.promises.rmdir(dir);
      }
    }
  } catch (error) {
    // Best-effort: the directory may have vanished or be unreadable; ignore.
  }
}
|
|
385
|
+
// sha256 hex digest of a file's contents, or "" when the file cannot be read
// (missing files hash to "" so callers can treat them as always-different).
async function getFileHash(filePath) {
  try {
    const data = await fs2__namespace.default.promises.readFile(filePath);
    const hasher = crypto2__default.default.createHash("sha256");
    hasher.update(data);
    return hasher.digest("hex");
  } catch (error) {
    return "";
  }
}
|
|
393
|
+
// Compare two files by content hash. An unreadable source (hash "") counts
// as "different" so the caller re-copies it and surfaces the real error.
async function areFilesDifferent(sourceFile, targetFile) {
  try {
    const [sourceHash, targetHash] = await Promise.all([getFileHash(sourceFile), getFileHash(targetFile)]);
    if (sourceHash === "") {
      return true;
    }
    return sourceHash !== targetHash;
  } catch (error) {
    return true;
  }
}
|
|
401
|
+
// Recursively list all regular files under `dir`, as paths relative to
// `baseDir`. Unreadable or missing directories contribute no files.
async function getAllFiles(dir, baseDir = dir) {
  const collected = [];
  try {
    const entries = await fs2__namespace.default.promises.readdir(dir, {
      withFileTypes: true
    });
    for (const entry of entries) {
      const fullPath = path2__default.default.join(dir, entry.name);
      if (entry.isDirectory()) {
        collected.push(...await getAllFiles(fullPath, baseDir));
      } else if (entry.isFile()) {
        collected.push(path2__default.default.relative(baseDir, fullPath));
      }
    }
  } catch (error) {
    // Directory missing or unreadable: fall through with what we have.
  }
  return collected;
}
|
|
421
|
+
|
|
422
|
+
// src/generateOntologyAssets.ts

// Verbose-logging toggle. NOTE(review): the bundler appears to have stripped
// the log calls it guarded, leaving empty `if (NOISY) ;` statements below.
var NOISY = false;
|
|
424
|
+
// Run the full ontology pipeline: ensure the .ontology directory exists,
// compile the ontology definition to IR, convert the IR to full metadata,
// then regenerate the OSDK sources from that metadata.
async function generateOntologyAssets({
  logger,
  ontologyDir
}) {
  const dirExists = fs2__namespace.default.existsSync(ontologyDir);
  if (!dirExists) {
    fs2__namespace.default.mkdirSync(ontologyDir, {
      recursive: true
    });
    logger.info("Created .ontology directory", {
      timestamp: true
    });
  }
  // Each stage consumes the previous stage's output file, so run in order.
  await ontologyJsToIr(logger);
  await ontologyIrToFullMetadata(logger);
  await fullMetadataToOsdk(logger);
}
|
|
440
|
+
// Compile .ontology/ontology.mjs into the ontology IR (.ontology.ir.json)
// via the `maker` CLI. NOTE(review): execa rejects on non-zero exit codes by
// default, so the explicit exitCode guard below is belt-and-braces — confirm
// whether it is ever reachable.
async function ontologyJsToIr(logger) {
  const result = await execa.execa("pnpm", ["exec", "maker", "-i", ".ontology/ontology.mjs", "-o", ".ontology.ir.json"]);
  const { stderr, exitCode } = result;
  if (exitCode === 0) {
    return;
  }
  logger.error(`Ontology IR generation failed with exit code ${exitCode}`, {
    timestamp: true
  });
  if (stderr) {
    logger.error(`Command stderr: ${stderr}`, {
      timestamp: true
    });
  }
  throw new Error(`Failed to generate ontology IR: exit code ${exitCode}`);
}
|
|
458
|
+
// Convert the ontology IR (.ontology.ir.json) into OntologyFullMetadata and
// write it to .ontology.json. Conversion failures are logged and rethrown.
async function ontologyIrToFullMetadata(logger) {
  try {
    const irContent = await fs2__namespace.default.promises.readFile("./.ontology.ir.json", {
      encoding: "utf-8"
    });
    const { blockData } = JSON.parse(irContent);
    const fullMeta = generatorConverters_ontologyir.OntologyIrToFullMetadataConverter.getFullMetadataFromIr(blockData);
    await fs2__namespace.default.promises.writeFile("./.ontology.json", JSON.stringify(fullMeta, null, 2));
  } catch (error) {
    const detail = error instanceof Error ? error.message : String(error);
    logger.error(`Failed to convert IR to full metadata: ${detail}`, {
      timestamp: true
    });
    throw error;
  }
}
|
|
474
|
+
// Generate the OSDK TypeScript sources from .ontology.json into a temp
// directory under node_modules/.tmp, then sync the result into .osdk/src.
// On success the temp dir is removed; on failure it is first logged as left
// in place, though the outer catch then attempts a best-effort cleanup.
async function fullMetadataToOsdk(logger) {
  const tempDir = path2__default.default.join(process.cwd(), "node_modules", ".tmp", "osdkGeneration");
  // Start from a clean slate: drop any remnants of a previous run.
  await fs2__namespace.default.promises.rm(tempDir, {
    recursive: true,
    force: true
  });
  await fs2__namespace.default.promises.mkdir(tempDir, {
    recursive: true
  });
  try {
    const tempSrcDir = path2__default.default.join(tempDir, "src");
    // NOTE(review): execa rejects on non-zero exit codes by default, so the
    // exitCode !== 0 branch below may only be reachable if options change.
    const {
      stdout,
      stderr,
      exitCode
    } = await execa.execa("pnpm", ["exec", "osdk", "unstable", "typescript", "generate", "--outDir", tempSrcDir, "--ontologyPath", ".ontology.json", "--beta", "true", "--packageType", "module", "--version", "dev"]);
    // Empty statements below are debug logging stripped by the bundler.
    if (stdout && NOISY) ;
    if (stderr) {
      logger.error(`OSDK generation stderr: ${stderr}`, {
        timestamp: true
      });
    }
    if (exitCode === 0) {
      const targetDir = ".osdk/src";
      try {
        if (NOISY) ;
        // Mirror the freshly generated sources into .osdk/src.
        await syncDirectories(tempSrcDir, targetDir, logger);
        if (NOISY) ;
        await fs2__namespace.default.promises.rm(tempDir, {
          recursive: true,
          force: true
        });
      } catch (error) {
        logger.error(`Failed to synchronize ${targetDir} directory: ${error instanceof Error ? error.message : String(error)}`, {
          timestamp: true
        });
        logger.error(`Temporary files left at: ${tempDir}`, {
          timestamp: true
        });
        throw error;
      }
    } else {
      logger.error(`OSDK generation failed with exit code ${exitCode}`, {
        timestamp: true
      });
      logger.error(`Temporary files left at: ${tempDir}`, {
        timestamp: true
      });
      throw new Error(`OSDK generation failed with exit code ${exitCode}`);
    }
  } catch (error) {
    // NOTE(review): this cleanup also removes the directory the messages
    // above claim was "left at" tempDir — confirm which behavior is intended.
    try {
      await fs2__namespace.default.promises.rm(tempDir, {
        recursive: true,
        force: true
      });
    } catch (cleanupError) {
      logger.warn(`Failed to clean up temporary directory: ${cleanupError instanceof Error ? cleanupError.message : String(cleanupError)}`, {
        timestamp: true
      });
    }
    throw error;
  }
}
|
|
538
|
+
// Watch the .ontology directory via the dev server's file watcher and
// regenerate ontology assets on every add/change/unlink. Returns an
// EventEmitter that emits "generated" after each successful regeneration
// (FoundryMiddlewareController listens for it to reload the faux Foundry).
function watchOntologyAsCode({
  watcher,
  logger,
  ontologyDir
}) {
  const emitter = new EventEmitter__default.default();
  logger.info(`Starting OAC file watcher for ${ontologyDir}`, {
    timestamp: true
  });
  if (!fs2__namespace.default.existsSync(ontologyDir)) {
    fs2__namespace.default.mkdirSync(ontologyDir, {
      recursive: true
    });
    logger.info("Created .ontology directory", {
      timestamp: true
    });
  }
  watcher.add(ontologyDir);
  watcher.on("add", handleOacFileChanged).on("change", handleOacFileChanged).on("unlink", handleOacFileChanged);
  // Kick off an initial generation immediately (no specific file changed).
  handleOacFileChanged(void 0);
  return emitter;
  // Regenerate assets when a file inside ontologyDir changes; called with
  // undefined for the initial run (which skips the path filter and the log).
  function handleOacFileChanged(filePath) {
    // The shared watcher reports the whole project; ignore paths outside
    // ontologyDir. NOTE(review): the "/"-separator prefix check presumably
    // fails on Windows paths ("\\") — confirm how the watcher reports paths.
    if (filePath && !filePath.startsWith(`${ontologyDir}/`)) {
      return;
    }
    if (filePath) {
      logger.info(`File ${filePath} changed.`, {
        timestamp: true
      });
    }
    // Fire-and-forget: generation runs async; errors are logged, and
    // "generated" is only emitted after a successful run.
    (async () => {
      await generateOntologyAssets({
        logger,
        ontologyDir
      });
      emitter.emit("generated");
    })().catch((error) => {
      console.error(error);
      logger.error(`Error executing command: ${error.message}`, {
        timestamp: true
      });
    });
  }
}
|
|
582
|
+
|
|
583
|
+
// src/index.ts
|
|
584
|
+
// Vite plugin entry point: wires the ontology-as-code watcher and the faux
// Foundry middleware into the dev server, and regenerates ontology assets
// up front when running a production build.
function ontologyAsCode(opts) {
  const ontologyDir = path2__default.default.resolve(".ontology");
  let config;
  return {
    name: "oac-vite-plugin",
    // eslint-disable-next-line @typescript-eslint/require-await
    async configResolved(resolvedConfig) {
      // Keep the resolved config for buildStart below.
      config = resolvedConfig;
    },
    configureServer(server) {
      const oacEmitter = watchOntologyAsCode({
        watcher: server.watcher,
        logger: server.config.logger,
        ontologyDir
      });
      const scheme = server.config.server.https ? "s" : "";
      const middlewareUrl = `http${scheme}://localhost:${server.config.server.port}`;
      const foundryMiddlewareController = new FoundryMiddlewareController(middlewareUrl, `ri.ontology.main.ontology.00000000-0000-0000-0000-000000000000`, oacEmitter, opts?.hooks);
      server.middlewares.use(foundryMiddlewareController.middleware);
    },
    async buildStart() {
      if (config.command !== "build") {
        return;
      }
      config.logger.info("Generating ontology assets for build...", {
        timestamp: true
      });
      try {
        await generateOntologyAssets({
          logger: config.logger,
          ontologyDir
        });
        config.logger.info("Successfully generated ontology assets for build", {
          timestamp: true
        });
      } catch (error) {
        const detail = error instanceof Error ? error.message : String(error);
        config.logger.error(`Failed to generate ontology assets: ${detail}`, {
          timestamp: true
        });
        throw error;
      }
    }
  };
}
|
|
626
|
+
|
|
627
|
+
exports.ontologyAsCode = ontologyAsCode;
//# sourceMappingURL=index.cjs.map
|