@seedprotocol/sdk 0.4.3 → 0.4.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +38 -348
- package/dist/{ArweaveClient-CleX_4Gw.js → ArweaveClient-CgWK-JgT.js} +8 -8
- package/dist/{ArweaveClient-CleX_4Gw.js.map → ArweaveClient-CgWK-JgT.js.map} +1 -1
- package/dist/{ArweaveClient-BvJ1FhQ5.js → ArweaveClient-WcG8CZAE.js} +8 -8
- package/dist/{ArweaveClient-BvJ1FhQ5.js.map → ArweaveClient-WcG8CZAE.js.map} +1 -1
- package/dist/{Db-DX08SxS9.js → Db-DjFdIdR9.js} +9 -16
- package/dist/{Db-DX08SxS9.js.map → Db-DjFdIdR9.js.map} +1 -1
- package/dist/{Db-BPnO1-_p.js → Db-DjofXdeU.js} +9 -9
- package/dist/{Db-BPnO1-_p.js.map → Db-DjofXdeU.js.map} +1 -1
- package/dist/{EasClient-BwhUcPjY.js → EasClient-Aojewp6P.js} +8 -8
- package/dist/{EasClient-CJSs38Db.js.map → EasClient-Aojewp6P.js.map} +1 -1
- package/dist/{EasClient-CJSs38Db.js → EasClient-BVFXp2O6.js} +8 -8
- package/dist/{EasClient-BwhUcPjY.js.map → EasClient-BVFXp2O6.js.map} +1 -1
- package/dist/{FileManager-B1tdLMsX.js → FileManager-C9zr4AJe.js} +8 -8
- package/dist/{FileManager-B1tdLMsX.js.map → FileManager-C9zr4AJe.js.map} +1 -1
- package/dist/{FileManager-Ct91ZhOE.js → FileManager-CxGJLw5C.js} +8 -8
- package/dist/{FileManager-Ct91ZhOE.js.map → FileManager-CxGJLw5C.js.map} +1 -1
- package/dist/Item/Item.d.ts +28 -7
- package/dist/Item/Item.d.ts.map +1 -1
- package/dist/Item/service/actors/runPublish.d.ts +5 -0
- package/dist/Item/service/actors/runPublish.d.ts.map +1 -0
- package/dist/Item/service/itemMachineSingle.d.ts +10 -5
- package/dist/Item/service/itemMachineSingle.d.ts.map +1 -1
- package/dist/ItemProperty/ItemProperty.d.ts +30 -5
- package/dist/ItemProperty/ItemProperty.d.ts.map +1 -1
- package/dist/ItemProperty/service/actors/loadOrCreateProperty.d.ts.map +1 -1
- package/dist/ItemProperty/service/propertyMachine.d.ts +10 -10
- package/dist/ItemProperty/service/propertyMachine.d.ts.map +1 -1
- package/dist/Model/Model.d.ts +27 -20
- package/dist/Model/Model.d.ts.map +1 -1
- package/dist/Model/index.d.ts +1 -1
- package/dist/Model/service/actors/createModelProperties.d.ts.map +1 -1
- package/dist/Model/service/actors/loadOrCreateModel.d.ts.map +1 -1
- package/dist/Model/service/actors/validateModel.d.ts.map +1 -1
- package/dist/Model/service/modelMachine.d.ts +18 -3
- package/dist/Model/service/modelMachine.d.ts.map +1 -1
- package/dist/ModelProperty/ModelProperty.d.ts +25 -2
- package/dist/ModelProperty/ModelProperty.d.ts.map +1 -1
- package/dist/ModelProperty/service/actors/compareAndMarkDraft.d.ts.map +1 -1
- package/dist/ModelProperty/service/actors/saveToSchema.d.ts.map +1 -1
- package/dist/ModelProperty/service/actors/validateProperty.d.ts.map +1 -1
- package/dist/ModelProperty/service/modelPropertyMachine.d.ts +17 -3
- package/dist/ModelProperty/service/modelPropertyMachine.d.ts.map +1 -1
- package/dist/{ModelProperty-Cr3BmgkC.js → ModelProperty-CGdkocQ8.js} +349 -817
- package/dist/ModelProperty-CGdkocQ8.js.map +1 -0
- package/dist/{PathResolver-DJdxE_OK.js → PathResolver-CX6GHoTS.js} +8 -8
- package/dist/{PathResolver-DJdxE_OK.js.map → PathResolver-CX6GHoTS.js.map} +1 -1
- package/dist/{PathResolver-BErmcZqP.js → PathResolver-z_WX47_o.js} +8 -8
- package/dist/{PathResolver-BErmcZqP.js.map → PathResolver-z_WX47_o.js.map} +1 -1
- package/dist/{QueryClient-DIu9c-w6.js → QueryClient-ByKPdRmE.js} +8 -8
- package/dist/{QueryClient-DIu9c-w6.js.map → QueryClient-ByKPdRmE.js.map} +1 -1
- package/dist/{QueryClient-D2mv63gP.js → QueryClient-Cb1iJO-x.js} +8 -8
- package/dist/{QueryClient-D2mv63gP.js.map → QueryClient-Cb1iJO-x.js.map} +1 -1
- package/dist/Schema/Schema.d.ts +24 -3
- package/dist/Schema/Schema.d.ts.map +1 -1
- package/dist/Schema/service/actors/checkExistingSchema.d.ts.map +1 -1
- package/dist/Schema/service/actors/createPropertyInstances.d.ts.map +1 -1
- package/dist/Schema/service/actors/loadOrCreateSchema.d.ts.map +1 -1
- package/dist/Schema/service/actors/verifyPropertyInstancesInCache.d.ts.map +1 -1
- package/dist/Schema/service/actors/writeModelsToDb.d.ts.map +1 -1
- package/dist/Schema/service/actors/writePropertiesToDb.d.ts.map +1 -1
- package/dist/Schema/service/actors/writeSchemaToDb.d.ts.map +1 -1
- package/dist/Schema/service/addModelsMachine.d.ts.map +1 -1
- package/dist/Schema/service/schemaMachine.d.ts +17 -3
- package/dist/Schema/service/schemaMachine.d.ts.map +1 -1
- package/dist/{Schema-DeKabJ0T.js → Schema-D1eqDHyt.js} +995 -186
- package/dist/Schema-D1eqDHyt.js.map +1 -0
- package/dist/{SchemaValidationService-cTlURuDt.js → SchemaValidationService-DyttFaV_.js} +7 -7
- package/dist/{SchemaValidationService-cTlURuDt.js.map → SchemaValidationService-DyttFaV_.js.map} +1 -1
- package/dist/browser/db/Db.d.ts.map +1 -1
- package/dist/browser/react/SeedProvider.d.ts +30 -0
- package/dist/browser/react/SeedProvider.d.ts.map +1 -0
- package/dist/browser/react/index.d.ts +4 -1
- package/dist/browser/react/index.d.ts.map +1 -1
- package/dist/browser/react/item.d.ts +10 -6
- package/dist/browser/react/item.d.ts.map +1 -1
- package/dist/browser/react/itemProperty.d.ts +37 -1
- package/dist/browser/react/itemProperty.d.ts.map +1 -1
- package/dist/browser/react/liveQuery.d.ts.map +1 -1
- package/dist/browser/react/model.d.ts +21 -7
- package/dist/browser/react/model.d.ts.map +1 -1
- package/dist/browser/react/modelProperty.d.ts +23 -0
- package/dist/browser/react/modelProperty.d.ts.map +1 -1
- package/dist/browser/react/queryClient.d.ts +28 -0
- package/dist/browser/react/queryClient.d.ts.map +1 -0
- package/dist/browser/react/schema.d.ts +8 -0
- package/dist/browser/react/schema.d.ts.map +1 -1
- package/dist/browser/react/trash.d.ts +5 -2
- package/dist/browser/react/trash.d.ts.map +1 -1
- package/dist/cjs/{ModelProperty-MkN5Rmx7.js → ModelProperty-BeJvgKMw.js} +377 -477
- package/dist/cjs/ModelProperty-BeJvgKMw.js.map +1 -0
- package/dist/cjs/{Schema-B5cr_JVK.js → Schema-CVs9J6eP.js} +709 -263
- package/dist/cjs/Schema-CVs9J6eP.js.map +1 -0
- package/dist/cjs/{SchemaValidationService-BgIzc3-r.js → SchemaValidationService-CDKcVRFQ.js} +4 -4
- package/dist/cjs/{SchemaValidationService-BgIzc3-r.js.map → SchemaValidationService-CDKcVRFQ.js.map} +1 -1
- package/dist/cjs/{getItem-CVJJPky2.js → getItem-B5RYPvrG.js} +4 -4
- package/dist/cjs/{getItem-CVJJPky2.js.map → getItem-B5RYPvrG.js.map} +1 -1
- package/dist/cjs/{getPublishPayload-DbOc3WA-.js → getPublishPayload-BD1qRob1.js} +26 -11
- package/dist/cjs/getPublishPayload-BD1qRob1.js.map +1 -0
- package/dist/cjs/{getPublishUploads-NzioLz-3.js → getPublishUploads-CnC9aYxs.js} +5 -5
- package/dist/cjs/getPublishUploads-CnC9aYxs.js.map +1 -0
- package/dist/cjs/{getSegmentedItemProperties-BsaklLwI.js → getSegmentedItemProperties-B_njnntx.js} +2 -2
- package/dist/cjs/{getSegmentedItemProperties-BsaklLwI.js.map → getSegmentedItemProperties-B_njnntx.js.map} +1 -1
- package/dist/cjs/{index-BmIVfqGN.js → index-BeKPbbk0.js} +12715 -12384
- package/dist/cjs/index-BeKPbbk0.js.map +1 -0
- package/dist/cjs/{index-C_0angRB.js → index-Dnywap_P.js} +4 -4
- package/dist/cjs/index-Dnywap_P.js.map +1 -0
- package/dist/client/actors/platformClassesInit.d.ts.map +1 -1
- package/dist/client/actors/processSchemaFiles.d.ts.map +1 -1
- package/dist/client/actors/saveAppState.d.ts.map +1 -1
- package/dist/db/read/getItemData.d.ts.map +1 -1
- package/dist/db/read/getItems.d.ts.map +1 -1
- package/dist/db/read/getModelPropertiesData.d.ts +19 -0
- package/dist/db/read/getModelPropertiesData.d.ts.map +1 -0
- package/dist/db/read/getModelsData.d.ts +15 -0
- package/dist/db/read/getModelsData.d.ts.map +1 -0
- package/dist/db/read/getPublishPayload.d.ts.map +1 -1
- package/dist/db/read/getPublishUploads.d.ts +1 -7
- package/dist/db/read/getPublishUploads.d.ts.map +1 -1
- package/dist/db/read/getSchemaUidForModel.d.ts.map +1 -1
- package/dist/db/write/updateSeedUid.d.ts +7 -0
- package/dist/db/write/updateSeedUid.d.ts.map +1 -0
- package/dist/eas.d.ts.map +1 -1
- package/dist/events/item/index.d.ts.map +1 -1
- package/dist/events/item/syncDbWithEas.d.ts.map +1 -1
- package/dist/{getItem-CcttmUY_.js → getItem-BB5HBCbK.js} +8 -8
- package/dist/{getItem-CcttmUY_.js.map → getItem-BB5HBCbK.js.map} +1 -1
- package/dist/{getPublishPayload-NFpqbd_H.js → getPublishPayload-uLm0AqN_.js} +29 -14
- package/dist/getPublishPayload-uLm0AqN_.js.map +1 -0
- package/dist/{getPublishUploads-Cpb9vgwE.js → getPublishUploads-Dc-HqhO8.js} +9 -9
- package/dist/getPublishUploads-Dc-HqhO8.js.map +1 -0
- package/dist/{getSegmentedItemProperties-DiyQPMgI.js → getSegmentedItemProperties-BrIqFNfD.js} +2 -2
- package/dist/{getSegmentedItemProperties-DiyQPMgI.js.map → getSegmentedItemProperties-BrIqFNfD.js.map} +1 -1
- package/dist/helpers/db.d.ts +12 -0
- package/dist/helpers/db.d.ts.map +1 -1
- package/dist/helpers/entity/entityDestroy.d.ts +41 -0
- package/dist/helpers/entity/entityDestroy.d.ts.map +1 -0
- package/dist/helpers/entity/index.d.ts +1 -0
- package/dist/helpers/entity/index.d.ts.map +1 -1
- package/dist/helpers/index.d.ts +1 -0
- package/dist/helpers/index.d.ts.map +1 -1
- package/dist/helpers/property/index.d.ts +12 -12
- package/dist/helpers/property/index.d.ts.map +1 -1
- package/dist/helpers/reactiveProxy.d.ts.map +1 -1
- package/dist/helpers/schema.d.ts.map +1 -1
- package/dist/helpers/updateSchema.d.ts +9 -0
- package/dist/helpers/updateSchema.d.ts.map +1 -1
- package/dist/helpers/waitForEntityIdle.d.ts +2 -2
- package/dist/helpers/waitForEntityIdle.d.ts.map +1 -1
- package/dist/imports/json.d.ts.map +1 -1
- package/dist/{index-r45w9hEq.js → index-2FcQHgKp.js} +2 -2
- package/dist/index-2FcQHgKp.js.map +1 -0
- package/dist/{json-I3vJhXo8.js → index-DPll6EAp.js} +12450 -12121
- package/dist/index-DPll6EAp.js.map +1 -0
- package/dist/{index-CRuq6HVi.js → index-LEY0Og1p.js} +9 -9
- package/dist/index-LEY0Og1p.js.map +1 -0
- package/dist/index.d.ts +3 -1
- package/dist/index.d.ts.map +1 -1
- package/dist/interfaces/IItem.d.ts +2 -0
- package/dist/interfaces/IItem.d.ts.map +1 -1
- package/dist/interfaces/IItemProperty.d.ts +1 -0
- package/dist/interfaces/IItemProperty.d.ts.map +1 -1
- package/dist/main.cjs +3 -3
- package/dist/main.js +999 -1033
- package/dist/main.js.map +1 -1
- package/dist/node.js +16 -16
- package/dist/node.js.map +1 -1
- package/dist/{property-Dy09KTxg.js → property-B15X7jLX.js} +7 -5
- package/dist/property-B15X7jLX.js.map +1 -0
- package/dist/{queries-LZYSuhtz.js → queries-BPDSpiEX.js} +2 -2
- package/dist/{queries-LZYSuhtz.js.map → queries-BPDSpiEX.js.map} +1 -1
- package/dist/services/write/actors/validateEntity.d.ts.map +1 -1
- package/dist/services/write/actors/writeToDatabase.d.ts.map +1 -1
- package/dist/services/write/writeProcessMachine.d.ts +1 -1
- package/dist/types/index.d.ts +9 -0
- package/dist/types/index.d.ts.map +1 -1
- package/dist/types/item.d.ts +12 -0
- package/dist/types/item.d.ts.map +1 -1
- package/dist/types/property.d.ts +6 -0
- package/dist/types/property.d.ts.map +1 -1
- package/dist/types/publish.d.ts +9 -0
- package/dist/types/publish.d.ts.map +1 -0
- package/package.json +12 -4
- package/dist/ModelProperty-Cr3BmgkC.js.map +0 -1
- package/dist/Schema-DeKabJ0T.js.map +0 -1
- package/dist/cjs/ModelProperty-MkN5Rmx7.js.map +0 -1
- package/dist/cjs/Schema-B5cr_JVK.js.map +0 -1
- package/dist/cjs/getPublishPayload-DbOc3WA-.js.map +0 -1
- package/dist/cjs/getPublishUploads-NzioLz-3.js.map +0 -1
- package/dist/cjs/index-BmIVfqGN.js.map +0 -1
- package/dist/cjs/index-C_0angRB.js.map +0 -1
- package/dist/events/item/publish.d.ts +0 -7
- package/dist/events/item/publish.d.ts.map +0 -1
- package/dist/getPublishPayload-NFpqbd_H.js.map +0 -1
- package/dist/getPublishUploads-Cpb9vgwE.js.map +0 -1
- package/dist/index-CRuq6HVi.js.map +0 -1
- package/dist/index-r45w9hEq.js.map +0 -1
- package/dist/json-I3vJhXo8.js.map +0 -1
- package/dist/property-Dy09KTxg.js.map +0 -1
|
@@ -1,22 +1,737 @@
|
|
|
1
1
|
import { fromCallback, setup, assign, createActor } from 'xstate';
|
|
2
|
-
import {
|
|
2
|
+
import { a as ModelPropertyDataTypes, aG as getLatestSchemaVersion, x as BaseFileManager, p as BaseDb, q as models, W as schemas, ae as generateId, aH as createModelsFromJson, aa as loadSchemaFromFile, aI as SEED_PROTOCOL_SCHEMA_NAME, aJ as addSchemaToDb, a2 as addModelsToDb, G as modelSchemas, r as properties, aE as listCompleteSchemaFiles, aK as loadModelsFromDbForSchema, aL as isInternalSchema, aM as createModelFromJson, M as Model, ad as writeProcessMachine, af as waitForEntityIdle, ag as createReactiveProxy, ah as findEntity, aq as loadAllSchemasFromDb, aN as ConflictError, am as getClient, an as ClientManagerState, aO as unloadEntity, aP as clearDestroySubscriptions, ai as forceRemoveFromCaches, aj as runDestroyLifecycle, aQ as setupEntityLiveQuery } from './index-DPll6EAp.js';
|
|
3
3
|
import { eq, desc, and } from 'drizzle-orm';
|
|
4
4
|
import debug from 'debug';
|
|
5
|
-
import { SchemaValidationService } from './SchemaValidationService-
|
|
6
|
-
import { M as ModelProperty, c as convertPropertyToSchemaUpdate, u as updateModelProperties } from './ModelProperty-Cr3BmgkC.js';
|
|
5
|
+
import { SchemaValidationService } from './SchemaValidationService-DyttFaV_.js';
|
|
7
6
|
import 'pluralize';
|
|
8
|
-
|
|
9
|
-
|
|
10
|
-
|
|
11
|
-
|
|
12
|
-
|
|
13
|
-
|
|
14
|
-
|
|
15
|
-
|
|
16
|
-
|
|
17
|
-
|
|
18
|
-
|
|
19
|
-
|
|
7
|
+
|
|
8
|
+
// Namespaced debug logger for the schema-update helpers in this chunk
// (enable with DEBUG=seedSdk:helpers:updateSchema).
const logger$d = debug('seedSdk:helpers:updateSchema');
|
|
9
|
+
/**
|
|
10
|
+
* Get the file path for a schema file
|
|
11
|
+
*/
|
|
12
|
+
/**
 * Make a schema name safe for use in a filename.
 *
 * Any character that is not alphanumeric, whitespace, a hyphen, or an
 * underscore becomes an underscore; whitespace runs become underscores;
 * underscores at either end are stripped and repeated underscores are
 * collapsed to one.
 *
 * @param name - Schema name to sanitize
 * @returns Sanitized name safe for use in filenames
 */
const sanitizeSchemaName$1 = (name) => {
    // Swap every disallowed character for an underscore placeholder.
    const asciiSafe = name.replace(/[^a-zA-Z0-9\s_-]/g, '_');
    // Each run of whitespace collapses into a single underscore.
    const noSpaces = asciiSafe.replace(/\s+/g, '_');
    // Trim underscores hugging either end, then squash repeats.
    return noSpaces.replace(/^_+|_+$/g, '').replace(/_+/g, '_');
};
|
|
28
|
+
/**
 * Build the full file path for a schema version file.
 * Format: {schemaFileId}_{schemaName}_v{version}.json — or, when no
 * schemaFileId is supplied, {schemaName}_v{version}.json.
 *
 * The ID-first format ensures all files for a schema group together when
 * sorted alphabetically.
 *
 * @param name - Schema name (sanitized before use)
 * @param version - Schema version
 * @param schemaFileId - Schema file ID; when falsy the prefix is omitted
 * @returns Absolute path inside the file manager's working directory
 */
const getSchemaFilePath$2 = (name, version, schemaFileId) => {
    const safeName = sanitizeSchemaName$1(name);
    // Prefix with the schema file ID when one is known so that every
    // version of the same schema sorts together alphabetically.
    let filename = `${safeName}_v${version}.json`;
    if (schemaFileId) {
        filename = `${schemaFileId}_${filename}`;
    }
    const pathModule = BaseFileManager.getPathModule();
    return pathModule.join(BaseFileManager.getWorkingDir(), filename);
};
|
|
47
|
+
/**
 * Look up the schemaFileId of the newest version of a schema in the database.
 *
 * @param schemaName - Schema name
 * @returns Schema file ID
 * @throws Error if the database is unavailable, the schema is not found,
 *   or the record has no schemaFileId
 */
async function getSchemaFileId(schemaName) {
    const db = BaseDb.getAppDb();
    if (!db) {
        throw new Error('Database not available');
    }
    // Only the most recent version of the schema is relevant here.
    const rows = await db
        .select()
        .from(schemas)
        .where(eq(schemas.name, schemaName))
        .orderBy(desc(schemas.version))
        .limit(1);
    const [latest] = rows;
    if (!latest?.schemaFileId) {
        throw new Error(`Schema ${schemaName} not found in database or missing schemaFileId`);
    }
    return latest.schemaFileId;
}
|
|
68
|
+
/**
 * Write the full schema to a new version file (e.g. when new models were added).
 * Used when _editedProperties contains 'schema:models' and there are no
 * property-level updates.
 *
 * @param schemaName - Schema name
 * @param schema - Full schema object (e.g. from _buildModelsFromInstances)
 * @returns The file path of the new schema version
 */
async function writeFullSchemaNewVersion(schemaName, schema) {
    const currentVersion = await getLatestSchemaVersion(schemaName);
    const nextVersion = currentVersion + 1;
    // Bump the version, stamp metadata, and append a migration record
    // describing this full-schema write.
    const bumpedSchema = {
        ...schema,
        version: nextVersion,
        metadata: {
            ...schema.metadata,
            updatedAt: new Date().toISOString(),
        },
        migrations: [
            // Older schema files may predate migration tracking.
            ...(schema.migrations || []),
            {
                version: nextVersion,
                timestamp: new Date().toISOString(),
                description: 'New schema version (e.g. new models added)',
                changes: [{ type: 'full_schema_write' }],
            },
        ],
    };
    // Prefer the ID carried on the schema object; fall back to the db lookup.
    const fileId = schema.id ?? (await getSchemaFileId(schemaName));
    const targetPath = getSchemaFilePath$2(schemaName, nextVersion, fileId);
    await BaseFileManager.saveFile(targetPath, JSON.stringify(bumpedSchema, null, 2));
    // OPFS/browser writes may not be immediately readable — wait until they are.
    await BaseFileManager.waitForFileWithContent(targetPath);
    logger$d(`Created new schema version ${nextVersion} for ${schemaName} at ${targetPath}`);
    return targetPath;
}
|
|
102
|
+
/**
 * Get model name from modelId.
 *
 * @param modelId - The model ID to look up
 * @returns The model name, or undefined if the ID is falsy or no row matches
 * @throws Error if the application database is unavailable
 */
async function getModelNameFromId(modelId) {
    if (!modelId) {
        return undefined;
    }
    const db = BaseDb.getAppDb();
    if (!db) {
        throw new Error('Database not found');
    }
    const rows = await db
        .select()
        .from(models)
        .where(eq(models.id, modelId))
        .limit(1);
    // No match yields undefined rather than throwing.
    return rows[0]?.name;
}
|
|
125
|
+
/**
 * Convert a TProperty/ModelPropertyMachineContext to SchemaPropertyUpdate format.
 * Translates the internal property representation into the schema-file shape.
 *
 * @param property - The TProperty instance to convert
 * @param modelName - The name of the model this property belongs to
 * @param propertyName - The name of the property
 * @returns A SchemaPropertyUpdate object ready to be passed to updateModelProperties
 */
async function convertPropertyToSchemaUpdate(property, modelName, propertyName) {
    // Resolve the referenced model's name: prefer the explicit `ref`,
    // otherwise look it up in the database via `refModelId`.
    const resolveRefModel = async () => {
        if (property.ref) {
            return property.ref;
        }
        if (property.refModelId) {
            return await getModelNameFromId(property.refModelId);
        }
        return undefined;
    };
    const updates = {};
    // dataType maps onto the schema file's `type` field.
    if (property.dataType) {
        updates.type = property.dataType;
    }
    // Relation properties carry the referenced model name.
    if (property.dataType === ModelPropertyDataTypes.Relation) {
        const refModel = await resolveRefModel();
        if (refModel) {
            updates.model = refModel;
        }
    }
    // List properties describe their element type (and model, for relations).
    if (property.dataType === ModelPropertyDataTypes.List && property.refValueType) {
        updates.items = { type: property.refValueType };
        const refModel = await resolveRefModel();
        if (refModel) {
            updates.items.model = refModel;
        }
    }
    // Storage configuration (e.g. Text properties persisted to files).
    if (property.storageType || property.localStorageDir || property.filenameSuffix) {
        updates.storage = {
            // Default to ItemStorage when only a path/suffix was provided.
            type: property.storageType || 'ItemStorage',
        };
        if (property.localStorageDir) {
            updates.storage.path = property.localStorageDir;
        }
        if (property.filenameSuffix) {
            updates.storage.extension = property.filenameSuffix;
        }
    }
    return {
        modelName,
        propertyName,
        updates,
    };
}
|
|
189
|
+
/**
 * Update model properties in a schema and create a new version.
 *
 * Loads the latest complete schema file, deep-copies it (preserving the
 * schema/model/property IDs), applies model renames and property updates,
 * writes the result as version N+1, and reloads it into the database.
 *
 * @param schemaName - The name of the schema to update
 * @param propertyUpdates - Array of property updates to apply
 * @param modelUpdates - Optional array of model renames ({ oldName, newName })
 * @returns The file path of the new schema version
 * @throws Error if schema not found or updates reference unknown models/properties
 */
async function updateModelProperties(schemaName, propertyUpdates, modelUpdates) {
    // Get the latest version of the schema; version 0 means it was never saved.
    const latestVersion = await getLatestSchemaVersion(schemaName);
    if (latestVersion === 0) {
        throw new Error(`Schema ${schemaName} not found`);
    }
    // Get schemaFileId from database
    const schemaFileId = await getSchemaFileId(schemaName);
    // Load the latest schema file
    const latestFilePath = getSchemaFilePath$2(schemaName, latestVersion, schemaFileId);
    const content = await BaseFileManager.readFileAsString(latestFilePath);
    const schemaFile = JSON.parse(content);
    if (!schemaFile.$schema) {
        throw new Error(`Schema file ${latestFilePath} is not a complete schema file`);
    }
    // Create a copy of the schema for the new version.
    // Preserve schema ID and all model/property IDs.
    const newVersion = latestVersion + 1;
    const updatedSchema = {
        ...schemaFile,
        version: newVersion,
        // Preserve schema ID from previous version
        id: schemaFile.id,
        metadata: {
            ...schemaFile.metadata,
            updatedAt: new Date().toISOString(),
        },
        // Deep copy models to preserve IDs
        models: Object.fromEntries(Object.entries(schemaFile.models).map(([modelName, model]) => [
            modelName,
            {
                ...model,
                // Preserve model ID
                id: model.id,
                // Deep copy properties to preserve IDs
                properties: Object.fromEntries(Object.entries(model.properties).map(([propName, prop]) => [
                    propName,
                    {
                        ...prop,
                        // Preserve property ID
                        id: prop.id,
                    },
                ])),
            },
        ])),
        migrations: [
            // FIX: guard against schema files without a migrations array —
            // spreading undefined throws; writeFullSchemaNewVersion already
            // guards the same way.
            ...(schemaFile.migrations || []),
            {
                version: newVersion,
                timestamp: new Date().toISOString(),
                description: `Updated model properties: ${propertyUpdates.map(u => `${u.modelName}.${u.propertyName}`).join(', ')}`,
                changes: propertyUpdates.map(update => ({
                    type: 'property_update',
                    modelName: update.modelName,
                    propertyName: update.propertyName,
                    updates: update.updates,
                })),
            },
        ],
    };
    // Apply model renames first (if any) so property updates see new names.
    if (modelUpdates && modelUpdates.length > 0) {
        for (const modelUpdate of modelUpdates) {
            if (updatedSchema.models[modelUpdate.oldName]) {
                // Rename the model in the models object
                updatedSchema.models[modelUpdate.newName] = updatedSchema.models[modelUpdate.oldName];
                delete updatedSchema.models[modelUpdate.oldName];
                // Update any property references (relations and list items)
                // that still point at the old model name.
                for (const modelName in updatedSchema.models) {
                    const model = updatedSchema.models[modelName];
                    for (const propertyName in model.properties) {
                        const property = model.properties[propertyName];
                        if (property.model === modelUpdate.oldName) {
                            property.model = modelUpdate.newName;
                        }
                        if (property.items?.model === modelUpdate.oldName) {
                            property.items.model = modelUpdate.newName;
                        }
                    }
                }
                // Record the rename in the migration entry created above.
                updatedSchema.migrations[updatedSchema.migrations.length - 1].changes.push({
                    type: 'model_rename',
                    oldName: modelUpdate.oldName,
                    newName: modelUpdate.newName,
                });
            }
        }
    }
    // Apply property updates
    for (const update of propertyUpdates) {
        const model = updatedSchema.models[update.modelName];
        if (!model) {
            throw new Error(`Model ${update.modelName} not found in schema ${schemaName}`);
        }
        if (!model.properties[update.propertyName]) {
            throw new Error(`Property ${update.propertyName} not found in model ${update.modelName} of schema ${schemaName}`);
        }
        // Merge the new values onto the existing property definition.
        const property = model.properties[update.propertyName];
        Object.assign(property, update.updates);
    }
    // Write the new schema version to file using ID-based naming (preferred)
    const newFilePath = getSchemaFilePath$2(schemaName, newVersion, updatedSchema.id ?? undefined);
    const newContent = JSON.stringify(updatedSchema, null, 2);
    await BaseFileManager.saveFile(newFilePath, newContent);
    // Wait for the file to be available with content (important for
    // browser/OPFS where writes may not be immediately readable).
    await BaseFileManager.waitForFileWithContent(newFilePath);
    logger$d(`Created new schema version ${newVersion} for ${schemaName} at ${newFilePath}`);
    // Extract model renames from the migration record so the database
    // reload can map old names to new ones.
    const modelRenames = new Map();
    const latestMigration = updatedSchema.migrations[updatedSchema.migrations.length - 1];
    for (const change of latestMigration.changes) {
        if (change.type === 'model_rename') {
            modelRenames.set(change.oldName, change.newName);
        }
    }
    // Load schema with model renames handled
    await loadSchemaWithRenames(newFilePath, modelRenames);
    return newFilePath;
}
|
|
319
|
+
/**
 * Load a schema file and handle model renames in the database
 * This is a helper function that processes model renames before loading
 *
 * @param schemaFilePath - Path to a complete schema JSON file (must carry a $schema field)
 * @param modelRenames - Map of oldName -> newName renames to apply when writing models to the db
 * @returns The same schemaFilePath that was passed in
 * @throws Error if the file is not a complete schema file or metadata.name is missing
 */
async function loadSchemaWithRenames(schemaFilePath, modelRenames) {
    const content = await BaseFileManager.readFileAsString(schemaFilePath);
    const schemaFile = JSON.parse(content);
    // A "complete" schema file is identified by its $schema marker field.
    if (!schemaFile.$schema) {
        throw new Error(`File ${schemaFilePath} is not a complete schema file (missing $schema field).`);
    }
    const schemaName = schemaFile.metadata?.name;
    const version = schemaFile.version;
    if (!schemaName) {
        throw new Error('Schema name is required in metadata.name');
    }
    // Convert to JsonImportSchema format for processing
    // Remove id fields for JsonImportSchema format (they're not part of the import format)
    const importData = {
        name: schemaName,
        models: Object.fromEntries(Object.entries(schemaFile.models).map(([modelName, model]) => [
            modelName,
            {
                description: model.description,
                properties: Object.fromEntries(Object.entries(model.properties).map(([propName, prop]) => [
                    propName,
                    {
                        type: prop.type || 'string', // Ensure type is present
                        // NOTE(review): the spread below re-copies every key of
                        // `prop`, so an explicit `type: undefined` key would
                        // override the 'string' default above, and `id` fields
                        // are in fact carried along despite the comment below —
                        // confirm whether that is intended.
                        ...prop,
                        // Remove id field for import format (not part of JsonImportSchema)
                    },
                ])),
                indexes: model.indexes,
            },
        ])),
    };
    // Generate schema ID if missing (mutates the in-memory schemaFile only;
    // the file on disk is not rewritten here).
    if (!schemaFile.id) {
        schemaFile.id = generateId();
        logger$d('Generated schema ID for schema:', schemaFile.id);
    }
    // Extract schemaFileIds from JSON file and generate missing ones BEFORE creating models
    // This ensures Model instances are created with correct IDs
    const modelFileIds = new Map();
    const propertyFileIds = new Map();
    for (const [modelName, model] of Object.entries(schemaFile.models)) {
        // Generate model ID if missing
        if (!model.id) {
            model.id = generateId();
            logger$d(`Generated model ID for ${modelName}:`, model.id);
        }
        modelFileIds.set(modelName, model.id);
        const propIds = new Map();
        for (const [propName, prop] of Object.entries(model.properties)) {
            // Generate property ID if missing
            if (!prop.id) {
                prop.id = generateId();
                logger$d(`Generated property ID for ${modelName}.${propName}:`, prop.id);
            }
            propIds.set(propName, prop.id);
        }
        if (propIds.size > 0) {
            propertyFileIds.set(modelName, propIds);
        }
    }
    // Convert JSON models to Model classes, passing modelFileIds and propertyFileIds so Model instances use correct IDs
    const modelDefinitions = await createModelsFromJson(importData, modelFileIds, propertyFileIds);
    // Convert schema file metadata to schema input for database
    const schemaInput = {
        name: schemaName,
        version,
        schemaFileId: schemaFile.id || null,
        schemaData: null,
        isDraft: false,
        isEdited: false,
        // assumes metadata.createdAt/updatedAt are parseable date strings —
        // new Date(...).getTime() yields NaN otherwise; TODO confirm upstream guarantees
        createdAt: new Date(schemaFile.metadata.createdAt).getTime(),
        updatedAt: new Date(schemaFile.metadata.updatedAt).getTime(),
    };
    // Use dynamic import to break circular dependency
    // (shadows the statically imported addSchemaToDb/addModelsToDb bindings in this scope)
    const { addSchemaToDb, addModelsToDb } = await import('./index-DPll6EAp.js').then(function (n) { return n.aT; });
    // Add schema to database with schemaFileId
    const schemaRecord = await addSchemaToDb(schemaInput, schemaFile.id);
    // Add models to database with model renames handled and schemaFileIds
    await addModelsToDb(modelDefinitions, schemaRecord, modelRenames, {
        schemaFileId: schemaFile.id,
        modelFileIds,
        propertyFileIds,
    });
    // Models are now Model instances, no registration needed
    // They should be created via Model.create() and are accessible via Model static methods
    for (const [modelName] of Object.entries(modelDefinitions)) {
        logger$d('loadSchemaWithRenames - model available:', modelName);
    }
    return schemaFilePath;
}
|
|
413
|
+
/**
 * Rename a property in a model.
 * Creates a new schema version in which the property appears under its new
 * name. The property keeps its original ID so it can be tracked across
 * versions; all other schema, model, and property IDs are preserved as well.
 * @param schemaName - The name of the schema
 * @param modelName - The name of the model
 * @param oldPropertyName - The current property name
 * @param newPropertyName - The new property name
 * @returns The file path of the new schema version
 * @throws Error if the schema, model, or old property is not found, or if the new name is already taken
 */
async function renameModelProperty(schemaName, modelName, oldPropertyName, newPropertyName) {
    // Resolve the latest version; 0 means the schema has never been saved.
    const latestVersion = await getLatestSchemaVersion(schemaName);
    if (latestVersion === 0) {
        throw new Error(`Schema ${schemaName} not found`);
    }
    // schemaFileId (from the database) selects ID-based file naming when available.
    const schemaFileId = await getSchemaFileId(schemaName);
    // Load and validate the latest schema file.
    const latestFilePath = getSchemaFilePath$2(schemaName, latestVersion, schemaFileId);
    const content = await BaseFileManager.readFileAsString(latestFilePath);
    const schemaFile = JSON.parse(content);
    if (!schemaFile.$schema) {
        throw new Error(`Schema file ${latestFilePath} is not a complete schema file`);
    }
    const model = schemaFile.models[modelName];
    if (!model) {
        throw new Error(`Model ${modelName} not found in schema ${schemaName}`);
    }
    if (!model.properties[oldPropertyName]) {
        throw new Error(`Property ${oldPropertyName} not found in model ${modelName} of schema ${schemaName}`);
    }
    if (model.properties[newPropertyName]) {
        throw new Error(`Property ${newPropertyName} already exists in model ${modelName} of schema ${schemaName}`);
    }
    // Build the next version. Renaming keeps the same property ID.
    const newVersion = latestVersion + 1;
    const oldProperty = model.properties[oldPropertyName];
    // Copy every property except the old name; the new name is re-added below.
    // (The previous implementation also ran a redundant `delete` afterwards and
    // re-assigned IDs that the spread already copied — both were no-ops.)
    const remainingProperties = Object.fromEntries(Object.entries(model.properties)
        .filter(([name]) => name !== oldPropertyName)
        .map(([name, prop]) => [name, { ...prop }]));
    const updatedSchema = {
        ...schemaFile,
        version: newVersion,
        id: schemaFile.id, // Preserve schema ID
        metadata: {
            ...schemaFile.metadata,
            updatedAt: new Date().toISOString(),
        },
        models: {
            ...schemaFile.models,
            [modelName]: {
                ...model,
                id: model.id, // Preserve model ID
                properties: {
                    ...remainingProperties,
                    [newPropertyName]: {
                        ...oldProperty, // Spread preserves the property ID when renaming
                    },
                },
            },
        },
        migrations: [
            ...schemaFile.migrations,
            {
                version: newVersion,
                timestamp: new Date().toISOString(),
                description: `Renamed property ${modelName}.${oldPropertyName} to ${newPropertyName}`,
                changes: [
                    {
                        type: 'property_rename',
                        modelName,
                        oldPropertyName,
                        newPropertyName,
                    },
                ],
            },
        ],
    };
    // Write the new schema version using ID-based naming (preferred).
    const newFilePath = getSchemaFilePath$2(schemaName, newVersion, updatedSchema.id ?? undefined);
    const newContent = JSON.stringify(updatedSchema, null, 2);
    // Ensure the directory exists before saving.
    const path = BaseFileManager.getPathModule();
    const dir = path.dirname(newFilePath);
    await BaseFileManager.createDirIfNotExists(dir);
    await BaseFileManager.saveFile(newFilePath, newContent);
    // Wait for the file to be available with content (important for browser/OPFS
    // where writes may not be immediately readable).
    await BaseFileManager.waitForFileWithContent(newFilePath);
    logger$d(`Renamed property ${oldPropertyName} to ${newPropertyName} in schema ${schemaName} v${newVersion}`);
    // Load the new schema file so in-memory state reflects the rename.
    await loadSchemaFromFile(newFilePath);
    return newFilePath;
}
|
|
510
|
+
/**
 * Delete a model from a schema.
 * Creates a new schema version without the model. Other models' properties
 * that reference the deleted model are either removed entirely
 * (`removeReferencingProperties: true`) or have their model reference stripped.
 * Either outcome is recorded in the new version's migration entry.
 * @param schemaName - The name of the schema
 * @param modelName - The name of the model to delete
 * @param options - Optional deletion options
 * @param options.removeReferencingProperties - When true, delete properties that reference the model; otherwise just remove the reference
 * @returns The file path of the new schema version
 * @throws Error if schema or model not found
 */
async function deleteModelFromSchema(schemaName, modelName, options = {}) {
    const { removeReferencingProperties = false } = options;
    // Resolve the latest version; 0 means the schema has never been saved.
    const latestVersion = await getLatestSchemaVersion(schemaName);
    if (latestVersion === 0) {
        throw new Error(`Schema ${schemaName} not found`);
    }
    // schemaFileId (from the database) selects ID-based file naming when available.
    const schemaFileId = await getSchemaFileId(schemaName);
    // Load and validate the latest schema file.
    const latestFilePath = getSchemaFilePath$2(schemaName, latestVersion, schemaFileId);
    const content = await BaseFileManager.readFileAsString(latestFilePath);
    const schemaFile = JSON.parse(content);
    if (!schemaFile.$schema) {
        throw new Error(`Schema file ${latestFilePath} is not a complete schema file`);
    }
    if (!schemaFile.models[modelName]) {
        throw new Error(`Model ${modelName} not found in schema ${schemaName}`);
    }
    // Create the new version without the model.
    // IDs for the remaining models and properties are preserved by the spreads.
    const newVersion = latestVersion + 1;
    const updatedSchema = {
        ...schemaFile,
        version: newVersion,
        id: schemaFile.id, // Preserve schema ID
        metadata: {
            ...schemaFile.metadata,
            updatedAt: new Date().toISOString(),
        },
        // Copy remaining models and their properties. `items` is cloned too so
        // the reference-stripping below cannot mutate the parsed schemaFile
        // through a shared nested object.
        models: Object.fromEntries(Object.entries(schemaFile.models)
            .filter(([name]) => name !== modelName)
            .map(([name, model]) => [
            name,
            {
                ...model, // Spread preserves model ID
                properties: Object.fromEntries(Object.entries(model.properties).map(([propName, prop]) => [
                    propName,
                    prop.items ? { ...prop, items: { ...prop.items } } : { ...prop }, // Preserve property ID
                ])),
            },
        ])),
        migrations: [
            ...schemaFile.migrations,
            {
                version: newVersion,
                timestamp: new Date().toISOString(),
                description: `Deleted model ${modelName} from schema`,
                changes: [
                    {
                        type: 'model_delete',
                        modelName,
                        removeReferencingProperties,
                    },
                ],
            },
        ],
    };
    // Handle properties that reference the deleted model.
    // BUG FIX: the previous implementation stripped the references first and
    // then re-scanned for them to build the migration note, so the
    // `properties_updated` change was never recorded. Record while scanning.
    const propertiesToRemove = [];
    const updatedProperties = [];
    for (const [otherModelName, model] of Object.entries(updatedSchema.models)) {
        for (const [propertyName, property] of Object.entries(model.properties)) {
            // Check if property references the deleted model (directly or via items).
            const referencesDeleted = property.model === modelName || property.items?.model === modelName;
            if (!referencesDeleted) {
                continue;
            }
            if (removeReferencingProperties) {
                // Mark for removal below.
                propertiesToRemove.push({ modelName: otherModelName, propertyName });
            }
            else {
                // Strip the reference but keep the property, and note it for the migration.
                updatedProperties.push({ modelName: otherModelName, propertyName });
                if (property.model === modelName) {
                    delete property.model;
                }
                if (property.items?.model === modelName) {
                    // If items only had model we could drop items entirely;
                    // for now just remove the model reference.
                    delete property.items.model;
                }
            }
        }
    }
    const currentMigration = updatedSchema.migrations[updatedSchema.migrations.length - 1];
    if (removeReferencingProperties) {
        // Remove the referencing properties and record each deletion.
        for (const { modelName: mName, propertyName } of propertiesToRemove) {
            delete updatedSchema.models[mName].properties[propertyName];
            currentMigration.changes.push({
                type: 'property_delete',
                modelName: mName,
                propertyName,
                reason: `Referenced deleted model ${modelName}`,
            });
        }
    }
    else if (updatedProperties.length > 0) {
        // Record which properties had their references stripped.
        currentMigration.changes.push({
            type: 'properties_updated',
            properties: updatedProperties,
            reason: `Removed references to deleted model ${modelName}`,
        });
    }
    // Write the new schema version using ID-based naming (preferred).
    const newFilePath = getSchemaFilePath$2(schemaName, newVersion, updatedSchema.id ?? undefined);
    const newContent = JSON.stringify(updatedSchema, null, 2);
    // Ensure the directory exists before saving.
    const path = BaseFileManager.getPathModule();
    const dir = path.dirname(newFilePath);
    await BaseFileManager.createDirIfNotExists(dir);
    await BaseFileManager.saveFile(newFilePath, newContent);
    // Wait for the file to be available with content (important for browser/OPFS
    // where writes may not be immediately readable).
    await BaseFileManager.waitForFileWithContent(newFilePath);
    logger$d(`Deleted model ${modelName} from schema ${schemaName} v${newVersion}`);
    // Load the new schema file so in-memory state reflects the deletion.
    await loadSchemaFromFile(newFilePath);
    return newFilePath;
}
|
|
649
|
+
/**
 * Delete a property from a model in a schema.
 * Produces a new schema version that is identical to the latest one except
 * that the targeted property is absent; a `property_delete` migration entry
 * documents the change. Schema, model, and remaining property IDs are kept.
 * @param schemaName - The name of the schema
 * @param modelName - The name of the model
 * @param propertyName - The name of the property to delete
 * @param options - Optional deletion options (currently unused)
 * @returns The file path of the new schema version
 * @throws Error if schema, model, or property not found
 */
async function deletePropertyFromModel(schemaName, modelName, propertyName, options = {}) {
    // A latest version of 0 means the schema was never saved.
    const latestVersion = await getLatestSchemaVersion(schemaName);
    if (latestVersion === 0) {
        throw new Error(`Schema ${schemaName} not found`);
    }
    // The database-held schemaFileId enables ID-based file naming.
    const schemaFileId = await getSchemaFileId(schemaName);
    // Read the latest schema file and make sure it is a complete one.
    const latestFilePath = getSchemaFilePath$2(schemaName, latestVersion, schemaFileId);
    const rawContent = await BaseFileManager.readFileAsString(latestFilePath);
    const schemaFile = JSON.parse(rawContent);
    if (!schemaFile.$schema) {
        throw new Error(`Schema file ${latestFilePath} is not a complete schema file`);
    }
    const targetModel = schemaFile.models[modelName];
    if (!targetModel) {
        throw new Error(`Model ${modelName} not found in schema ${schemaName}`);
    }
    if (!targetModel.properties[propertyName]) {
        throw new Error(`Property ${propertyName} not found in model ${modelName} of schema ${schemaName}`);
    }
    const newVersion = latestVersion + 1;
    // Rebuild every model, copying each property except the one being deleted.
    // Spreading each object keeps model and property IDs intact.
    const rebuiltModels = {};
    for (const [mName, m] of Object.entries(schemaFile.models)) {
        const rebuiltProperties = {};
        for (const [pName, prop] of Object.entries(m.properties)) {
            if (mName === modelName && pName === propertyName) {
                continue; // Skip the property being deleted
            }
            rebuiltProperties[pName] = { ...prop }; // Preserve property ID
        }
        rebuiltModels[mName] = {
            ...m, // Preserve model ID
            properties: rebuiltProperties,
        };
    }
    // Record the deletion in the migration history of the new version.
    const migrationEntry = {
        version: newVersion,
        timestamp: new Date().toISOString(),
        description: `Deleted property ${modelName}.${propertyName} from schema`,
        changes: [
            {
                type: 'property_delete',
                modelName,
                propertyName,
            },
        ],
    };
    const updatedSchema = {
        ...schemaFile,
        version: newVersion,
        id: schemaFile.id, // Preserve schema ID
        metadata: {
            ...schemaFile.metadata,
            updatedAt: new Date().toISOString(),
        },
        models: rebuiltModels,
        migrations: [...schemaFile.migrations, migrationEntry],
    };
    // Persist the new schema version using ID-based naming (preferred).
    const newFilePath = getSchemaFilePath$2(schemaName, newVersion, updatedSchema.id ?? undefined);
    const serialized = JSON.stringify(updatedSchema, null, 2);
    // Create the target directory if needed, then save.
    const path = BaseFileManager.getPathModule();
    await BaseFileManager.createDirIfNotExists(path.dirname(newFilePath));
    await BaseFileManager.saveFile(newFilePath, serialized);
    // Wait for the file to be available with content (important for browser/OPFS
    // where writes may not be immediately readable).
    await BaseFileManager.waitForFileWithContent(newFilePath);
    logger$d(`Deleted property ${propertyName} from model ${modelName} in schema ${schemaName} v${newVersion}`);
    // Reload so in-memory state matches the new file.
    await loadSchemaFromFile(newFilePath);
    return newFilePath;
}
|
|
20
735
|
|
|
21
736
|
const logger$c = debug('seedSdk:schema:actors:loadOrCreateSchema');
|
|
22
737
|
/**
|
|
@@ -32,13 +747,12 @@ const getModelIdsForSchema$1 = async (schemaId) => {
|
|
|
32
747
|
return [];
|
|
33
748
|
}
|
|
34
749
|
try {
|
|
35
|
-
const { modelSchemas, models: modelsTable } = await import('./index-r45w9hEq.js').then(function (n) { return n.s; });
|
|
36
750
|
const modelRecords = await db
|
|
37
751
|
.select({
|
|
38
|
-
modelFileId:
|
|
752
|
+
modelFileId: models.schemaFileId,
|
|
39
753
|
})
|
|
40
754
|
.from(modelSchemas)
|
|
41
|
-
.innerJoin(
|
|
755
|
+
.innerJoin(models, eq(modelSchemas.modelId, models.id))
|
|
42
756
|
.where(eq(modelSchemas.schemaId, schemaId));
|
|
43
757
|
const modelIds = modelRecords
|
|
44
758
|
.map((row) => row.modelFileId)
|
|
@@ -61,7 +775,7 @@ const createModelInstances$2 = async (modelIds) => {
|
|
|
61
775
|
return;
|
|
62
776
|
}
|
|
63
777
|
try {
|
|
64
|
-
const { Model } = await import('./
|
|
778
|
+
const { Model } = await import('./index-DPll6EAp.js').then(function (n) { return n.aW; });
|
|
65
779
|
// Create instances for all model IDs in parallel
|
|
66
780
|
// Model.createById() will check cache first, then query DB and create if needed
|
|
67
781
|
const createPromises = modelIds.map(async (modelFileId) => {
|
|
@@ -101,13 +815,11 @@ const verifyPropertiesPersisted = async (modelId, modelName, maxRetries = 10, re
|
|
|
101
815
|
if (!db) {
|
|
102
816
|
throw new Error('Database not available for property verification');
|
|
103
817
|
}
|
|
104
|
-
const { properties: propertiesTable } = await import('./json-I3vJhXo8.js').then(function (n) { return n.aJ; });
|
|
105
|
-
const { eq } = await import('drizzle-orm');
|
|
106
818
|
for (let attempt = 0; attempt < maxRetries; attempt++) {
|
|
107
819
|
const props = await db
|
|
108
820
|
.select()
|
|
109
|
-
.from(
|
|
110
|
-
.where(eq(
|
|
821
|
+
.from(properties)
|
|
822
|
+
.where(eq(properties.modelId, modelId))
|
|
111
823
|
.limit(1);
|
|
112
824
|
if (props.length > 0) {
|
|
113
825
|
logger$c(`Verified properties exist for model "${modelName}" (modelId: ${modelId}) after ${attempt + 1} attempt(s)`);
|
|
@@ -169,15 +881,14 @@ const getSchemaFilePath$1 = (name, version, schemaFileId) => {
|
|
|
169
881
|
const loadOrCreateSchema = fromCallback(({ sendBack, input: { context } }) => {
|
|
170
882
|
const _loadOrCreateSchema = async () => {
|
|
171
883
|
const { schemaName } = context;
|
|
172
|
-
// Check if this is an internal SDK schema (should not create files in app directory)
|
|
173
|
-
const { isInternalSchema, SEED_PROTOCOL_SCHEMA_NAME } = await import('./json-I3vJhXo8.js').then(function (n) { return n.aQ; });
|
|
884
|
+
// Check if this is an internal SDK schema (should not create files in app directory) — use static import so consumer bundles resolve correctly
|
|
174
885
|
const isInternal = isInternalSchema(schemaName);
|
|
175
886
|
if (isInternal && schemaName === SEED_PROTOCOL_SCHEMA_NAME) {
|
|
176
887
|
// For Seed Protocol, always load from internal file, never create new
|
|
177
888
|
logger$c(`Loading internal Seed Protocol schema from SDK`);
|
|
178
889
|
console.log('[loadOrCreateSchema] Loading internal Seed Protocol schema from SDK');
|
|
179
890
|
try {
|
|
180
|
-
const internalSchema = await import('./
|
|
891
|
+
const internalSchema = await import('./index-DPll6EAp.js').then(function (n) { return n.aV; });
|
|
181
892
|
const schemaFile = internalSchema.default;
|
|
182
893
|
console.log('[loadOrCreateSchema] Loaded schema file, models count:', Object.keys(schemaFile.models || {}).length);
|
|
183
894
|
// Check if it exists in database, if not, add it
|
|
@@ -202,8 +913,6 @@ const loadOrCreateSchema = fromCallback(({ sendBack, input: { context } }) => {
|
|
|
202
913
|
logger$c(`Added Seed Protocol schema to database`);
|
|
203
914
|
// Also add models and properties to database
|
|
204
915
|
// Convert to JsonImportSchema format for processing
|
|
205
|
-
const { createModelsFromJson } = await import('./json-I3vJhXo8.js').then(function (n) { return n.aW; });
|
|
206
|
-
const { addModelsToDb } = await import('./json-I3vJhXo8.js').then(function (n) { return n.aR; });
|
|
207
916
|
const importData = {
|
|
208
917
|
name: schemaName,
|
|
209
918
|
models: Object.fromEntries(Object.entries(schemaFile.models || {}).map(([modelName, model]) => [
|
|
@@ -292,15 +1001,13 @@ const loadOrCreateSchema = fromCallback(({ sendBack, input: { context } }) => {
|
|
|
292
1001
|
logger$c(`Added ${Object.keys(modelDefinitions).length} models and their properties to database for Seed Protocol schema`);
|
|
293
1002
|
// Verify properties are persisted (important for browser environments)
|
|
294
1003
|
// Query the database to get model IDs that were just created
|
|
295
|
-
const { modelSchemas } = await import('./json-I3vJhXo8.js').then(function (n) { return n.aO; });
|
|
296
|
-
const { models: modelsTable } = await import('./json-I3vJhXo8.js').then(function (n) { return n.aJ; });
|
|
297
1004
|
const modelLinks = await db
|
|
298
1005
|
.select({
|
|
299
1006
|
modelId: modelSchemas.modelId,
|
|
300
|
-
modelName:
|
|
1007
|
+
modelName: models.name,
|
|
301
1008
|
})
|
|
302
1009
|
.from(modelSchemas)
|
|
303
|
-
.innerJoin(
|
|
1010
|
+
.innerJoin(models, eq(modelSchemas.modelId, models.id))
|
|
304
1011
|
.where(eq(modelSchemas.schemaId, schemaRecord.id));
|
|
305
1012
|
// Verify properties for at least one model (Seed model if available)
|
|
306
1013
|
const seedModelLink = modelLinks.find((link) => link.modelName === 'Seed');
|
|
@@ -317,17 +1024,14 @@ const loadOrCreateSchema = fromCallback(({ sendBack, input: { context } }) => {
|
|
|
317
1024
|
// Schema exists, but always ensure models/properties are in database
|
|
318
1025
|
// This handles the case where schema was added but models weren't (from previous code)
|
|
319
1026
|
// or where models were added but properties weren't
|
|
320
|
-
const { modelSchemas } = await import('./json-I3vJhXo8.js').then(function (n) { return n.aO; });
|
|
321
|
-
const { models: modelsTable } = await import('./json-I3vJhXo8.js').then(function (n) { return n.aJ; });
|
|
322
|
-
const { properties: propertiesTable } = await import('./json-I3vJhXo8.js').then(function (n) { return n.aJ; });
|
|
323
1027
|
// Check if models are linked to the schema
|
|
324
1028
|
const modelLinks = await db
|
|
325
1029
|
.select({
|
|
326
1030
|
modelId: modelSchemas.modelId,
|
|
327
|
-
modelName:
|
|
1031
|
+
modelName: models.name,
|
|
328
1032
|
})
|
|
329
1033
|
.from(modelSchemas)
|
|
330
|
-
.innerJoin(
|
|
1034
|
+
.innerJoin(models, eq(modelSchemas.modelId, models.id))
|
|
331
1035
|
.where(eq(modelSchemas.schemaId, schemaRecord.id));
|
|
332
1036
|
// Check if we have all expected models
|
|
333
1037
|
const expectedModelNames = Object.keys(schemaFile.models || {});
|
|
@@ -343,8 +1047,8 @@ const loadOrCreateSchema = fromCallback(({ sendBack, input: { context } }) => {
|
|
|
343
1047
|
if (link.modelId) {
|
|
344
1048
|
const props = await db
|
|
345
1049
|
.select()
|
|
346
|
-
.from(
|
|
347
|
-
.where(eq(
|
|
1050
|
+
.from(properties)
|
|
1051
|
+
.where(eq(properties.modelId, link.modelId))
|
|
348
1052
|
.limit(1);
|
|
349
1053
|
if (props.length === 0) {
|
|
350
1054
|
missingProperties = true;
|
|
@@ -358,8 +1062,6 @@ const loadOrCreateSchema = fromCallback(({ sendBack, input: { context } }) => {
|
|
|
358
1062
|
if (missingModels.length > 0 || missingProperties || modelLinks.length === 0) {
|
|
359
1063
|
logger$c(`Seed Protocol schema exists but models/properties incomplete (missing models: ${missingModels.length}, missing properties: ${missingProperties}), adding them now`);
|
|
360
1064
|
console.log(`[loadOrCreateSchema] Adding models/properties: missingModels=${missingModels.length}, missingProperties=${missingProperties}, modelLinks=${modelLinks.length}`);
|
|
361
|
-
const { createModelsFromJson } = await import('./json-I3vJhXo8.js').then(function (n) { return n.aW; });
|
|
362
|
-
const { addModelsToDb } = await import('./json-I3vJhXo8.js').then(function (n) { return n.aR; });
|
|
363
1065
|
// Convert SchemaFileFormat to JsonImportSchema format
|
|
364
1066
|
// Schema format: { dataType, ref, refValueType, storageType, localStorageDir, filenameSuffix }
|
|
365
1067
|
// JSON import format: { type, model, items, storage: { type, path, extension } }
|
|
@@ -463,10 +1165,10 @@ const loadOrCreateSchema = fromCallback(({ sendBack, input: { context } }) => {
|
|
|
463
1165
|
updatedModelLinks = await db
|
|
464
1166
|
.select({
|
|
465
1167
|
modelId: modelSchemas.modelId,
|
|
466
|
-
modelName:
|
|
1168
|
+
modelName: models.name,
|
|
467
1169
|
})
|
|
468
1170
|
.from(modelSchemas)
|
|
469
|
-
.innerJoin(
|
|
1171
|
+
.innerJoin(models, eq(modelSchemas.modelId, models.id))
|
|
470
1172
|
.where(eq(modelSchemas.schemaId, schemaRecord.id));
|
|
471
1173
|
const linkedModelNames = updatedModelLinks
|
|
472
1174
|
.map((link) => link.modelName)
|
|
@@ -686,7 +1388,7 @@ const loadOrCreateSchema = fromCallback(({ sendBack, input: { context } }) => {
|
|
|
686
1388
|
// Ensure models are populated (fallback for seed-protocol if missing)
|
|
687
1389
|
if ((!mergedModels || Object.keys(mergedModels).length === 0) && schemaName === 'Seed Protocol') {
|
|
688
1390
|
try {
|
|
689
|
-
const internalSchema = await import('./
|
|
1391
|
+
const internalSchema = await import('./index-DPll6EAp.js').then(function (n) { return n.aV; });
|
|
690
1392
|
const internalSchemaFile = internalSchema.default;
|
|
691
1393
|
mergedModels = { ...(internalSchemaFile.models || {}) };
|
|
692
1394
|
logger$c(`Populated models for seed-protocol schema from internal file`);
|
|
@@ -881,7 +1583,7 @@ const loadOrCreateSchema = fromCallback(({ sendBack, input: { context } }) => {
|
|
|
881
1583
|
// Ensure models are populated (fallback for seed-protocol if missing)
|
|
882
1584
|
if ((!mergedModels || Object.keys(mergedModels).length === 0) && schemaName === 'Seed Protocol') {
|
|
883
1585
|
try {
|
|
884
|
-
const internalSchema = await import('./
|
|
1586
|
+
const internalSchema = await import('./index-DPll6EAp.js').then(function (n) { return n.aV; });
|
|
885
1587
|
const internalSchemaFile = internalSchema.default;
|
|
886
1588
|
mergedModels = { ...(internalSchemaFile.models || {}) };
|
|
887
1589
|
logger$c(`Populated models for seed-protocol schema from internal file`);
|
|
@@ -953,7 +1655,7 @@ const loadOrCreateSchema = fromCallback(({ sendBack, input: { context } }) => {
|
|
|
953
1655
|
// Ensure models are populated (fallback for seed-protocol if missing)
|
|
954
1656
|
if ((!mergedModels || Object.keys(mergedModels).length === 0) && schemaName === 'Seed Protocol') {
|
|
955
1657
|
try {
|
|
956
|
-
const internalSchema = await import('./
|
|
1658
|
+
const internalSchema = await import('./index-DPll6EAp.js').then(function (n) { return n.aV; });
|
|
957
1659
|
const internalSchemaFile = internalSchema.default;
|
|
958
1660
|
mergedModels = { ...(internalSchemaFile.models || {}) };
|
|
959
1661
|
logger$c(`Populated models for seed-protocol schema from internal file`);
|
|
@@ -1022,7 +1724,7 @@ const loadOrCreateSchema = fromCallback(({ sendBack, input: { context } }) => {
|
|
|
1022
1724
|
// If schemaData is missing, try to load from internal schema file for seed-protocol
|
|
1023
1725
|
if (!dbSchema.schemaData && schemaName === 'Seed Protocol') {
|
|
1024
1726
|
try {
|
|
1025
|
-
const internalSchema = await import('./
|
|
1727
|
+
const internalSchema = await import('./index-DPll6EAp.js').then(function (n) { return n.aV; });
|
|
1026
1728
|
const schemaFile = internalSchema.default;
|
|
1027
1729
|
logger$c(`Found seed-protocol schema in internal file (schemaData missing, using internal schema)`);
|
|
1028
1730
|
// Update database with schemaData for future loads
|
|
@@ -1131,7 +1833,7 @@ const loadOrCreateSchema = fromCallback(({ sendBack, input: { context } }) => {
|
|
|
1131
1833
|
// Ensure models are populated (fallback for seed-protocol if missing)
|
|
1132
1834
|
if ((!mergedModels || Object.keys(mergedModels).length === 0) && schemaName === 'Seed Protocol') {
|
|
1133
1835
|
try {
|
|
1134
|
-
const internalSchema = await import('./
|
|
1836
|
+
const internalSchema = await import('./index-DPll6EAp.js').then(function (n) { return n.aV; });
|
|
1135
1837
|
const internalSchemaFile = internalSchema.default;
|
|
1136
1838
|
mergedModels = { ...(internalSchemaFile.models || {}) };
|
|
1137
1839
|
logger$c(`Populated models for seed-protocol schema from internal file`);
|
|
@@ -1447,18 +2149,15 @@ const addModelsMachine = setup({
|
|
|
1447
2149
|
}),
|
|
1448
2150
|
createModelInstances: fromCallback(({ sendBack, input }) => {
|
|
1449
2151
|
const _createInstances = async () => {
|
|
1450
|
-
const { Schema } = await Promise.resolve().then(function () { return Schema$1; });
|
|
1451
|
-
const { BaseDb } = await import('./json-I3vJhXo8.js').then(function (n) { return n.aI; });
|
|
1452
|
-
const { models: modelsTable } = await import('./json-I3vJhXo8.js').then(function (n) { return n.aJ; });
|
|
1453
|
-
const { eq } = await import('drizzle-orm');
|
|
1454
|
-
const { generateId } = await import('./json-I3vJhXo8.js').then(function (n) { return n.aL; });
|
|
1455
2152
|
const debug = (await import('debug')).default;
|
|
1456
2153
|
const logger = debug('seedSdk:schema:addModels:createInstances');
|
|
2154
|
+
const { Schema: SchemaClass, schemaInstanceState } = await Promise.resolve().then(function () { return Schema$1; });
|
|
1457
2155
|
const schemaName = input.schemaContext.metadata?.name || input.schemaContext.schemaName;
|
|
1458
|
-
const schemaInstance =
|
|
2156
|
+
const schemaInstance = SchemaClass.create(schemaName, {
|
|
2157
|
+
waitForReady: false,
|
|
2158
|
+
});
|
|
1459
2159
|
const modelInstances = new Map();
|
|
1460
2160
|
// Get instance state to store model instances
|
|
1461
|
-
const { schemaInstanceState } = await Promise.resolve().then(function () { return Schema$1; });
|
|
1462
2161
|
const instanceState = schemaInstanceState.get(schemaInstance);
|
|
1463
2162
|
if (!instanceState) {
|
|
1464
2163
|
throw new Error('Schema instance state not found');
|
|
@@ -1473,8 +2172,8 @@ const addModelsMachine = setup({
|
|
|
1473
2172
|
if (db) {
|
|
1474
2173
|
const dbModels = await db
|
|
1475
2174
|
.select()
|
|
1476
|
-
.from(
|
|
1477
|
-
.where(eq(
|
|
2175
|
+
.from(models)
|
|
2176
|
+
.where(eq(models.name, modelName))
|
|
1478
2177
|
.limit(1);
|
|
1479
2178
|
if (dbModels.length > 0 && dbModels[0].schemaFileId) {
|
|
1480
2179
|
modelFileId = dbModels[0].schemaFileId;
|
|
@@ -1492,7 +2191,10 @@ const addModelsMachine = setup({
|
|
|
1492
2191
|
}
|
|
1493
2192
|
// Create new Model instance with modelFileId
|
|
1494
2193
|
// Model.create() will set _modelFileId in the context automatically
|
|
1495
|
-
const modelInstance = Model.create(modelName, schemaName, {
|
|
2194
|
+
const modelInstance = Model.create(modelName, schemaName, {
|
|
2195
|
+
modelFileId,
|
|
2196
|
+
waitForReady: false,
|
|
2197
|
+
});
|
|
1496
2198
|
const service = modelInstance.getService();
|
|
1497
2199
|
logger(`Created Model instance for "${modelName}" with modelFileId "${modelFileId}"`);
|
|
1498
2200
|
// Wait for the Model service to finish loading (loadOrCreateModel completes)
|
|
@@ -1673,11 +2375,6 @@ const addModelsMachine = setup({
|
|
|
1673
2375
|
const _persist = async () => {
|
|
1674
2376
|
const debug = (await import('debug')).default;
|
|
1675
2377
|
const logger = debug('seedSdk:schema:addModels:persist');
|
|
1676
|
-
const { createModelFromJson } = await import('./json-I3vJhXo8.js').then(function (n) { return n.aW; });
|
|
1677
|
-
const { addModelsToDb } = await import('./json-I3vJhXo8.js').then(function (n) { return n.aR; });
|
|
1678
|
-
const { BaseDb } = await import('./json-I3vJhXo8.js').then(function (n) { return n.aI; });
|
|
1679
|
-
const { schemas: schemasTable } = await import('./json-I3vJhXo8.js').then(function (n) { return n.aN; });
|
|
1680
|
-
const { eq } = await import('drizzle-orm');
|
|
1681
2378
|
// Only process in browser environment where store is available
|
|
1682
2379
|
if (typeof window === 'undefined') {
|
|
1683
2380
|
logger('Not in browser environment, skipping store update');
|
|
@@ -1713,8 +2410,8 @@ const addModelsMachine = setup({
|
|
|
1713
2410
|
logger(`Looking up schema by schemaFileId: ${schemaFileId} (attempt ${attempt + 1})`);
|
|
1714
2411
|
const schemasById = await db
|
|
1715
2412
|
.select()
|
|
1716
|
-
.from(
|
|
1717
|
-
.where(eq(
|
|
2413
|
+
.from(schemas)
|
|
2414
|
+
.where(eq(schemas.schemaFileId, schemaFileId))
|
|
1718
2415
|
.limit(1);
|
|
1719
2416
|
if (schemasById.length > 0) {
|
|
1720
2417
|
schemaRecord = schemasById[0];
|
|
@@ -1730,12 +2427,11 @@ const addModelsMachine = setup({
|
|
|
1730
2427
|
// FALLBACK: If not found by schemaFileId, try by name (prefer draft records)
|
|
1731
2428
|
if (!schemaRecord) {
|
|
1732
2429
|
logger(`Looking up schema by name: "${schemaName}" (attempt ${attempt + 1})`);
|
|
1733
|
-
const { desc } = await import('drizzle-orm');
|
|
1734
2430
|
const schemasByName = await db
|
|
1735
2431
|
.select()
|
|
1736
|
-
.from(
|
|
1737
|
-
.where(eq(
|
|
1738
|
-
.orderBy(desc(
|
|
2432
|
+
.from(schemas)
|
|
2433
|
+
.where(eq(schemas.name, schemaName))
|
|
2434
|
+
.orderBy(desc(schemas.isDraft), desc(schemas.version))
|
|
1739
2435
|
.limit(10); // Get multiple to find drafts
|
|
1740
2436
|
if (schemasByName.length > 0) {
|
|
1741
2437
|
// Prefer draft records
|
|
@@ -1991,13 +2687,12 @@ const getModelIdsForSchema = async (schemaId) => {
|
|
|
1991
2687
|
return [];
|
|
1992
2688
|
}
|
|
1993
2689
|
try {
|
|
1994
|
-
const { modelSchemas, models: modelsTable } = await import('./index-r45w9hEq.js').then(function (n) { return n.s; });
|
|
1995
2690
|
const modelRecords = await db
|
|
1996
2691
|
.select({
|
|
1997
|
-
modelFileId:
|
|
2692
|
+
modelFileId: models.schemaFileId,
|
|
1998
2693
|
})
|
|
1999
2694
|
.from(modelSchemas)
|
|
2000
|
-
.innerJoin(
|
|
2695
|
+
.innerJoin(models, eq(modelSchemas.modelId, models.id))
|
|
2001
2696
|
.where(eq(modelSchemas.schemaId, schemaId));
|
|
2002
2697
|
const modelIds = modelRecords
|
|
2003
2698
|
.map((row) => row.modelFileId)
|
|
@@ -2035,7 +2730,12 @@ const createModelInstances$1 = async (modelIds) => {
|
|
|
2035
2730
|
return;
|
|
2036
2731
|
}
|
|
2037
2732
|
try {
|
|
2038
|
-
const
|
|
2733
|
+
const mod = await import('./index-DPll6EAp.js').then(function (n) { return n.aW; });
|
|
2734
|
+
const Model = mod?.Model ?? mod?.default;
|
|
2735
|
+
if (!Model) {
|
|
2736
|
+
logger$b('Model not available from dynamic import');
|
|
2737
|
+
return;
|
|
2738
|
+
}
|
|
2039
2739
|
const createPromises = modelIds.map(async (modelFileId) => {
|
|
2040
2740
|
try {
|
|
2041
2741
|
const model = await Model.createById(modelFileId);
|
|
@@ -2070,13 +2770,12 @@ const checkExistingSchema = fromCallback(({ sendBack, input: { context } }) => {
|
|
|
2070
2770
|
});
|
|
2071
2771
|
return;
|
|
2072
2772
|
}
|
|
2073
|
-
// Check if this is an internal SDK schema (Seed Protocol)
|
|
2074
|
-
const { isInternalSchema, SEED_PROTOCOL_SCHEMA_NAME } = await import('./json-I3vJhXo8.js').then(function (n) { return n.aQ; });
|
|
2773
|
+
// Check if this is an internal SDK schema (Seed Protocol) — use static import so consumer bundles resolve correctly
|
|
2075
2774
|
const isInternal = isInternalSchema(schemaName);
|
|
2076
2775
|
if (isInternal && schemaName === SEED_PROTOCOL_SCHEMA_NAME) {
|
|
2077
2776
|
// For Seed Protocol, check if it exists in database
|
|
2078
2777
|
try {
|
|
2079
|
-
const internalSchema = await import('./
|
|
2778
|
+
const internalSchema = await import('./index-DPll6EAp.js').then(function (n) { return n.aV; });
|
|
2080
2779
|
const schemaFile = internalSchema.default;
|
|
2081
2780
|
if (db && schemaFile.id) {
|
|
2082
2781
|
const existing = await db
|
|
@@ -2427,15 +3126,14 @@ const writeSchemaToDb = fromCallback(({ sendBack, input }) => {
|
|
|
2427
3126
|
const _write = async () => {
|
|
2428
3127
|
const { schemaName, schemaFile, existingDbSchema } = input;
|
|
2429
3128
|
try {
|
|
2430
|
-
// Check if this is an internal SDK schema (Seed Protocol)
|
|
2431
|
-
const { isInternalSchema, SEED_PROTOCOL_SCHEMA_NAME } = await import('./json-I3vJhXo8.js').then(function (n) { return n.aQ; });
|
|
3129
|
+
// Check if this is an internal SDK schema (Seed Protocol) — use static import so consumer bundles resolve correctly
|
|
2432
3130
|
const isInternal = isInternalSchema(schemaName);
|
|
2433
3131
|
let finalSchema;
|
|
2434
3132
|
let schemaRecord;
|
|
2435
3133
|
if (isInternal && schemaName === SEED_PROTOCOL_SCHEMA_NAME) {
|
|
2436
3134
|
// For Seed Protocol, load from internal file
|
|
2437
3135
|
logger$a(`Loading internal Seed Protocol schema from SDK`);
|
|
2438
|
-
const internalSchema = await import('./
|
|
3136
|
+
const internalSchema = await import('./index-DPll6EAp.js').then(function (n) { return n.aV; });
|
|
2439
3137
|
finalSchema = internalSchema.default;
|
|
2440
3138
|
const db = BaseDb.getAppDb();
|
|
2441
3139
|
if (db && finalSchema.id) {
|
|
@@ -2631,10 +3329,6 @@ const writeModelsToDb = fromCallback(({ sendBack, input }) => {
|
|
|
2631
3329
|
const { schema, schemaRecord, schemaName } = input;
|
|
2632
3330
|
try {
|
|
2633
3331
|
// Check if models already exist in database
|
|
2634
|
-
const { BaseDb } = await import('./json-I3vJhXo8.js').then(function (n) { return n.aI; });
|
|
2635
|
-
const { modelSchemas } = await import('./json-I3vJhXo8.js').then(function (n) { return n.aO; });
|
|
2636
|
-
const { models: modelsTable } = await import('./json-I3vJhXo8.js').then(function (n) { return n.aJ; });
|
|
2637
|
-
const { eq } = await import('drizzle-orm');
|
|
2638
3332
|
const db = BaseDb.getAppDb();
|
|
2639
3333
|
if (!db) {
|
|
2640
3334
|
throw new Error('Database not available');
|
|
@@ -2643,10 +3337,10 @@ const writeModelsToDb = fromCallback(({ sendBack, input }) => {
|
|
|
2643
3337
|
const modelLinks = await db
|
|
2644
3338
|
.select({
|
|
2645
3339
|
modelId: modelSchemas.modelId,
|
|
2646
|
-
modelName:
|
|
3340
|
+
modelName: models.name,
|
|
2647
3341
|
})
|
|
2648
3342
|
.from(modelSchemas)
|
|
2649
|
-
.innerJoin(
|
|
3343
|
+
.innerJoin(models, eq(modelSchemas.modelId, models.id))
|
|
2650
3344
|
.where(eq(modelSchemas.schemaId, schemaRecord.id));
|
|
2651
3345
|
// Check if we have all expected models
|
|
2652
3346
|
const expectedModelNames = Object.keys(schema.models || {});
|
|
@@ -2662,9 +3356,9 @@ const writeModelsToDb = fromCallback(({ sendBack, input }) => {
|
|
|
2662
3356
|
for (const link of modelLinks) {
|
|
2663
3357
|
if (link.modelId) {
|
|
2664
3358
|
const modelRecord = await db
|
|
2665
|
-
.select({ schemaFileId:
|
|
2666
|
-
.from(
|
|
2667
|
-
.where(eq(
|
|
3359
|
+
.select({ schemaFileId: models.schemaFileId })
|
|
3360
|
+
.from(models)
|
|
3361
|
+
.where(eq(models.id, link.modelId))
|
|
2668
3362
|
.limit(1);
|
|
2669
3363
|
if (modelRecord.length > 0 && modelRecord[0].schemaFileId) {
|
|
2670
3364
|
modelFileIds.push(modelRecord[0].schemaFileId);
|
|
@@ -2677,10 +3371,7 @@ const writeModelsToDb = fromCallback(({ sendBack, input }) => {
|
|
|
2677
3371
|
});
|
|
2678
3372
|
return;
|
|
2679
3373
|
}
|
|
2680
|
-
//
|
|
2681
|
-
const { createModelsFromJson } = await import('./json-I3vJhXo8.js').then(function (n) { return n.aW; });
|
|
2682
|
-
// Check if this is Seed Protocol schema (has different format)
|
|
2683
|
-
const { isInternalSchema, SEED_PROTOCOL_SCHEMA_NAME } = await import('./json-I3vJhXo8.js').then(function (n) { return n.aQ; });
|
|
3374
|
+
// Check if this is Seed Protocol schema (has different format) — use static import so consumer bundles resolve correctly
|
|
2684
3375
|
const isInternal = isInternalSchema(schemaName);
|
|
2685
3376
|
let importData;
|
|
2686
3377
|
if (isInternal && schemaName === SEED_PROTOCOL_SCHEMA_NAME) {
|
|
@@ -2910,7 +3601,7 @@ const createModelInstances = fromCallback(({ sendBack, input }) => {
|
|
|
2910
3601
|
return;
|
|
2911
3602
|
}
|
|
2912
3603
|
try {
|
|
2913
|
-
const { Model } = await import('./
|
|
3604
|
+
const { Model } = await import('./index-DPll6EAp.js').then(function (n) { return n.aW; });
|
|
2914
3605
|
// Create instances for all model IDs in parallel
|
|
2915
3606
|
// Model.createById() will check cache first, then query DB and create if needed
|
|
2916
3607
|
const createPromises = modelIds.map(async (modelFileId) => {
|
|
@@ -2995,7 +3686,7 @@ const verifyModelInstancesInCache = fromCallback(({ sendBack, input }) => {
|
|
|
2995
3686
|
}
|
|
2996
3687
|
try {
|
|
2997
3688
|
const result = await verifyWithRetry$2(async () => {
|
|
2998
|
-
const { Model } = await import('./
|
|
3689
|
+
const { Model } = await import('./index-DPll6EAp.js').then(function (n) { return n.aW; });
|
|
2999
3690
|
// Check each model ID in the cache
|
|
3000
3691
|
const verifiedInstances = [];
|
|
3001
3692
|
const missingIds = [];
|
|
@@ -3051,8 +3742,7 @@ const writePropertiesToDb = fromCallback(({ sendBack, input }) => {
|
|
|
3051
3742
|
const _write = async () => {
|
|
3052
3743
|
const { modelIds } = input;
|
|
3053
3744
|
try {
|
|
3054
|
-
const {
|
|
3055
|
-
const { models: modelsTable, properties: propertiesTable } = await import('./json-I3vJhXo8.js').then(function (n) { return n.aJ; });
|
|
3745
|
+
const { models: modelsTable, properties: propertiesTable } = await import('./index-DPll6EAp.js').then(function (n) { return n.aR; });
|
|
3056
3746
|
const { eq } = await import('drizzle-orm');
|
|
3057
3747
|
const db = BaseDb.getAppDb();
|
|
3058
3748
|
if (!db) {
|
|
@@ -3238,8 +3928,15 @@ const createPropertyInstances = fromCallback(({ sendBack, input }) => {
|
|
|
3238
3928
|
return;
|
|
3239
3929
|
}
|
|
3240
3930
|
try {
|
|
3241
|
-
const
|
|
3242
|
-
const
|
|
3931
|
+
const modProp = await import('./ModelProperty-CGdkocQ8.js');
|
|
3932
|
+
const ModelProperty = modProp?.ModelProperty ?? modProp?.default;
|
|
3933
|
+
const modModel = await import('./index-DPll6EAp.js').then(function (n) { return n.aW; });
|
|
3934
|
+
const Model = modModel?.Model ?? modModel?.default;
|
|
3935
|
+
if (!ModelProperty || !Model) {
|
|
3936
|
+
logger$2('ModelProperty or Model not available from dynamic import');
|
|
3937
|
+
sendBack({ type: 'instancesCreated', count: 0 });
|
|
3938
|
+
return;
|
|
3939
|
+
}
|
|
3243
3940
|
// Properties are typically loaded when Model instances are loaded
|
|
3244
3941
|
// But we can verify they exist by checking Model instances
|
|
3245
3942
|
let successCount = 0;
|
|
@@ -3338,7 +4035,11 @@ const verifyPropertyInstancesInCache = fromCallback(({ sendBack, input }) => {
|
|
|
3338
4035
|
}
|
|
3339
4036
|
try {
|
|
3340
4037
|
const result = await verifyWithRetry(async () => {
|
|
3341
|
-
const
|
|
4038
|
+
const mod = await import('./ModelProperty-CGdkocQ8.js');
|
|
4039
|
+
const ModelProperty = mod?.ModelProperty ?? mod?.default;
|
|
4040
|
+
if (!ModelProperty) {
|
|
4041
|
+
throw new Error('ModelProperty not available from dynamic import');
|
|
4042
|
+
}
|
|
3342
4043
|
// Check each property ID in the cache
|
|
3343
4044
|
const verifiedInstances = [];
|
|
3344
4045
|
const missingIds = [];
|
|
@@ -3498,6 +4199,23 @@ const schemaMachine = setup({
|
|
|
3498
4199
|
return newContext;
|
|
3499
4200
|
}),
|
|
3500
4201
|
},
|
|
4202
|
+
destroyStarted: {
|
|
4203
|
+
actions: assign({ _destroyInProgress: true, _destroyError: null }),
|
|
4204
|
+
},
|
|
4205
|
+
destroyDone: {
|
|
4206
|
+
actions: assign({ _destroyInProgress: false }),
|
|
4207
|
+
},
|
|
4208
|
+
destroyError: {
|
|
4209
|
+
actions: assign(({ event }) => ({
|
|
4210
|
+
_destroyInProgress: false,
|
|
4211
|
+
_destroyError: event.error instanceof Error
|
|
4212
|
+
? { message: event.error.message, name: event.error.name }
|
|
4213
|
+
: { message: String(event.error) },
|
|
4214
|
+
})),
|
|
4215
|
+
},
|
|
4216
|
+
clearDestroyError: {
|
|
4217
|
+
actions: assign({ _destroyError: null }),
|
|
4218
|
+
},
|
|
3501
4219
|
},
|
|
3502
4220
|
states: {
|
|
3503
4221
|
loading: {
|
|
@@ -4091,10 +4809,12 @@ class Schema {
|
|
|
4091
4809
|
});
|
|
4092
4810
|
// Note: Property getters/setters are now handled by the Proxy in create()
|
|
4093
4811
|
}
|
|
4094
|
-
static create(schemaName) {
|
|
4812
|
+
static create(schemaName, options) {
|
|
4095
4813
|
if (!schemaName) {
|
|
4096
4814
|
throw new Error('Schema name is required');
|
|
4097
4815
|
}
|
|
4816
|
+
const waitForReady = options?.waitForReady !== false;
|
|
4817
|
+
const readyTimeout = options?.readyTimeout ?? 5000;
|
|
4098
4818
|
// First, check if we have an instance cached by name
|
|
4099
4819
|
if (this.instanceCacheByName.has(schemaName)) {
|
|
4100
4820
|
const { instance, refCount } = this.instanceCacheByName.get(schemaName);
|
|
@@ -4102,7 +4822,9 @@ class Schema {
|
|
|
4102
4822
|
instance,
|
|
4103
4823
|
refCount: refCount + 1,
|
|
4104
4824
|
});
|
|
4105
|
-
|
|
4825
|
+
if (!waitForReady)
|
|
4826
|
+
return instance;
|
|
4827
|
+
return waitForEntityIdle(instance, { timeout: readyTimeout }).then(() => instance);
|
|
4106
4828
|
}
|
|
4107
4829
|
// Create new instance
|
|
4108
4830
|
const newInstance = new this(schemaName);
|
|
@@ -4425,9 +5147,10 @@ class Schema {
|
|
|
4425
5147
|
instance: proxiedInstance,
|
|
4426
5148
|
refCount: 1,
|
|
4427
5149
|
});
|
|
4428
|
-
|
|
4429
|
-
|
|
4430
|
-
|
|
5150
|
+
const schema = proxiedInstance;
|
|
5151
|
+
if (!waitForReady)
|
|
5152
|
+
return schema;
|
|
5153
|
+
return waitForEntityIdle(schema, { timeout: readyTimeout }).then(() => schema);
|
|
4431
5154
|
}
|
|
4432
5155
|
/**
|
|
4433
5156
|
* Update the cache to use schemaFileId as the key instead of schemaName
|
|
@@ -4464,7 +5187,7 @@ class Schema {
|
|
|
4464
5187
|
});
|
|
4465
5188
|
return instance;
|
|
4466
5189
|
}
|
|
4467
|
-
return
|
|
5190
|
+
return null;
|
|
4468
5191
|
}
|
|
4469
5192
|
/**
|
|
4470
5193
|
* Clear all cached Schema instances.
|
|
@@ -4511,17 +5234,15 @@ class Schema {
|
|
|
4511
5234
|
return cachedInstance;
|
|
4512
5235
|
}
|
|
4513
5236
|
// Query database to get schema name from ID
|
|
4514
|
-
const { BaseDb } = await import('./json-I3vJhXo8.js').then(function (n) { return n.aI; });
|
|
4515
|
-
const { schemas: schemasTable } = await import('./json-I3vJhXo8.js').then(function (n) { return n.aN; });
|
|
4516
5237
|
const db = BaseDb.getAppDb();
|
|
4517
5238
|
if (!db) {
|
|
4518
5239
|
throw new Error('Database not available');
|
|
4519
5240
|
}
|
|
4520
5241
|
const dbSchemas = await db
|
|
4521
5242
|
.select()
|
|
4522
|
-
.from(
|
|
4523
|
-
.where(eq(
|
|
4524
|
-
.orderBy(desc(
|
|
5243
|
+
.from(schemas)
|
|
5244
|
+
.where(eq(schemas.schemaFileId, schemaFileId))
|
|
5245
|
+
.orderBy(desc(schemas.version))
|
|
4525
5246
|
.limit(1);
|
|
4526
5247
|
if (dbSchemas.length === 0) {
|
|
4527
5248
|
throw new Error(`Schema with ID "${schemaFileId}" not found in database`);
|
|
@@ -4568,7 +5289,7 @@ class Schema {
|
|
|
4568
5289
|
* @returns Array of Schema instances
|
|
4569
5290
|
*/
|
|
4570
5291
|
static async all(options = {}) {
|
|
4571
|
-
const { includeAllVersions = false, includeInternal = false } = options;
|
|
5292
|
+
const { includeAllVersions = false, includeInternal = false, waitForReady = false, readyTimeout = 5000, } = options;
|
|
4572
5293
|
try {
|
|
4573
5294
|
// Use loadAllSchemasFromDb as single source of truth
|
|
4574
5295
|
// This intelligently merges database and file data, including drafts
|
|
@@ -4651,13 +5372,18 @@ class Schema {
|
|
|
4651
5372
|
}
|
|
4652
5373
|
catch (error) {
|
|
4653
5374
|
// Fallback to creating by name if createById fails
|
|
4654
|
-
schemaInstances.push(this.create(schemaName));
|
|
5375
|
+
schemaInstances.push(this.create(schemaName, { waitForReady: false }));
|
|
4655
5376
|
}
|
|
4656
5377
|
}
|
|
4657
5378
|
else {
|
|
4658
|
-
schemaInstances.push(this.create(schemaName));
|
|
5379
|
+
schemaInstances.push(this.create(schemaName, { waitForReady: false }));
|
|
4659
5380
|
}
|
|
4660
5381
|
}
|
|
5382
|
+
if (waitForReady && schemaInstances.length > 0) {
|
|
5383
|
+
await Promise.all(schemaInstances.map((s) => waitForEntityIdle(s, {
|
|
5384
|
+
timeout: readyTimeout,
|
|
5385
|
+
})));
|
|
5386
|
+
}
|
|
4661
5387
|
return schemaInstances;
|
|
4662
5388
|
}
|
|
4663
5389
|
catch (error) {
|
|
@@ -4695,7 +5421,12 @@ class Schema {
|
|
|
4695
5421
|
// Create Schema instances for each unique schema name
|
|
4696
5422
|
const schemaInstances = [];
|
|
4697
5423
|
for (const schemaName of uniqueSchemaNames) {
|
|
4698
|
-
schemaInstances.push(this.create(schemaName));
|
|
5424
|
+
schemaInstances.push(this.create(schemaName, { waitForReady: false }));
|
|
5425
|
+
}
|
|
5426
|
+
if (waitForReady && schemaInstances.length > 0) {
|
|
5427
|
+
await Promise.all(schemaInstances.map((s) => waitForEntityIdle(s, {
|
|
5428
|
+
timeout: readyTimeout,
|
|
5429
|
+
})));
|
|
4699
5430
|
}
|
|
4700
5431
|
return schemaInstances;
|
|
4701
5432
|
}
|
|
@@ -4738,6 +5469,7 @@ class Schema {
|
|
|
4738
5469
|
*/
|
|
4739
5470
|
get models() {
|
|
4740
5471
|
const context = this._getSnapshotContext();
|
|
5472
|
+
const schemaName = context.schemaName;
|
|
4741
5473
|
// Get model IDs from service context (reactive state)
|
|
4742
5474
|
const liveQueryIds = context._liveQueryModelIds || [];
|
|
4743
5475
|
// Get pending model IDs (not yet in DB)
|
|
@@ -4746,15 +5478,27 @@ class Schema {
|
|
|
4746
5478
|
const pendingIds = [];
|
|
4747
5479
|
// Combine and deduplicate
|
|
4748
5480
|
const allModelIds = [...new Set([...liveQueryIds, ...pendingIds])];
|
|
4749
|
-
// Get Model instances from static cache
|
|
5481
|
+
// Get Model instances from static cache (from schema load)
|
|
5482
|
+
const seen = new Set();
|
|
4750
5483
|
const modelInstances = [];
|
|
4751
5484
|
for (const modelFileId of allModelIds) {
|
|
4752
5485
|
const model = Model.getById(modelFileId);
|
|
4753
|
-
if (model) {
|
|
5486
|
+
if (model && modelFileId && !seen.has(modelFileId)) {
|
|
4754
5487
|
modelInstances.push(model);
|
|
5488
|
+
seen.add(modelFileId);
|
|
5489
|
+
}
|
|
5490
|
+
}
|
|
5491
|
+
// Include models created at runtime via Model.create() that belong to this schema
|
|
5492
|
+
// (they may not be in _liveQueryModelIds until schema context is updated)
|
|
5493
|
+
if (schemaName) {
|
|
5494
|
+
const cachedForSchema = Model.getCachedInstancesForSchema(schemaName);
|
|
5495
|
+
for (const model of cachedForSchema) {
|
|
5496
|
+
const id = model.id;
|
|
5497
|
+
if (id && !seen.has(id)) {
|
|
5498
|
+
modelInstances.push(model);
|
|
5499
|
+
seen.add(id);
|
|
5500
|
+
}
|
|
4755
5501
|
}
|
|
4756
|
-
// Note: Cannot create models asynchronously in this synchronous getter
|
|
4757
|
-
// Models will be created elsewhere when needed
|
|
4758
5502
|
}
|
|
4759
5503
|
// Return a new array reference (snapshot at time of access)
|
|
4760
5504
|
return [...modelInstances];
|
|
@@ -4871,9 +5615,6 @@ class Schema {
|
|
|
4871
5615
|
throw new ConflictError(errorMessage, conflictCheck);
|
|
4872
5616
|
}
|
|
4873
5617
|
const context = this._getSnapshotContext();
|
|
4874
|
-
const { BaseDb } = await import('./json-I3vJhXo8.js').then(function (n) { return n.aI; });
|
|
4875
|
-
const { schemas: schemasTable } = await import('./json-I3vJhXo8.js').then(function (n) { return n.aN; });
|
|
4876
|
-
const { addSchemaToDb } = await import('./json-I3vJhXo8.js').then(function (n) { return n.aR; });
|
|
4877
5618
|
if (!context._isDraft || !context._editedProperties || context._editedProperties.size === 0) {
|
|
4878
5619
|
logger('No changes to save');
|
|
4879
5620
|
return '';
|
|
@@ -4913,8 +5654,14 @@ class Schema {
|
|
|
4913
5654
|
createdAt: new Date(currentSchema.metadata.createdAt).getTime(),
|
|
4914
5655
|
updatedAt: new Date(currentSchema.metadata.updatedAt).getTime(),
|
|
4915
5656
|
}, currentSchema.id, JSON.stringify(currentSchema, null, 2), true);
|
|
5657
|
+
const dbSchema = await db
|
|
5658
|
+
.select()
|
|
5659
|
+
.from(schemas)
|
|
5660
|
+
.where(eq(schemas.name, this.schemaName))
|
|
5661
|
+
.limit(1);
|
|
4916
5662
|
// Collect all edited properties and convert them to SchemaPropertyUpdate format
|
|
4917
5663
|
const propertyUpdates = [];
|
|
5664
|
+
const { ModelProperty } = await import('./ModelProperty-CGdkocQ8.js');
|
|
4918
5665
|
for (const propertyKey of context._editedProperties) {
|
|
4919
5666
|
// Skip schema-level changes (like schema name changes)
|
|
4920
5667
|
if (propertyKey === 'schema:name') {
|
|
@@ -4941,6 +5688,31 @@ class Schema {
|
|
|
4941
5688
|
propertyUpdates.push(propertyUpdate);
|
|
4942
5689
|
}
|
|
4943
5690
|
if (propertyUpdates.length === 0) {
|
|
5691
|
+
// When only new models were added, _editedProperties contains 'schema:models' and we write the full schema
|
|
5692
|
+
if (context._editedProperties.has('schema:models')) {
|
|
5693
|
+
const newFilePath = await writeFullSchemaNewVersion(this.schemaName, currentSchema);
|
|
5694
|
+
const fileContent = await BaseFileManager.readFileAsString(newFilePath);
|
|
5695
|
+
const publishedSchema = JSON.parse(fileContent);
|
|
5696
|
+
if (dbSchema.length > 0) {
|
|
5697
|
+
await db
|
|
5698
|
+
.update(schemas)
|
|
5699
|
+
.set({
|
|
5700
|
+
isDraft: false,
|
|
5701
|
+
isEdited: false,
|
|
5702
|
+
schemaFileId: publishedSchema.id,
|
|
5703
|
+
schemaData: JSON.stringify(publishedSchema, null, 2),
|
|
5704
|
+
version: publishedSchema.version,
|
|
5705
|
+
updatedAt: new Date(publishedSchema.metadata.updatedAt).getTime(),
|
|
5706
|
+
})
|
|
5707
|
+
.where(eq(schemas.id, dbSchema[0].id));
|
|
5708
|
+
}
|
|
5709
|
+
this._service.send({
|
|
5710
|
+
type: 'clearDraft',
|
|
5711
|
+
_dbUpdatedAt: new Date(publishedSchema.metadata.updatedAt).getTime(),
|
|
5712
|
+
_dbVersion: publishedSchema.version,
|
|
5713
|
+
});
|
|
5714
|
+
return newFilePath;
|
|
5715
|
+
}
|
|
4944
5716
|
logger('No valid property updates to save');
|
|
4945
5717
|
return '';
|
|
4946
5718
|
}
|
|
@@ -4948,18 +5720,12 @@ class Schema {
|
|
|
4948
5720
|
const newFilePath = await updateModelProperties(this.schemaName, propertyUpdates);
|
|
4949
5721
|
// STEP 3: After file is written, update database to mark as published (isDraft = false)
|
|
4950
5722
|
// Load the file to get the final schema with IDs
|
|
4951
|
-
const { BaseFileManager } = await import('./json-I3vJhXo8.js').then(function (n) { return n.aK; });
|
|
4952
5723
|
const fileContent = await BaseFileManager.readFileAsString(newFilePath);
|
|
4953
5724
|
const publishedSchema = JSON.parse(fileContent);
|
|
4954
5725
|
// Update database record: set isDraft = false and update schemaFileId
|
|
4955
|
-
const dbSchema = await db
|
|
4956
|
-
.select()
|
|
4957
|
-
.from(schemasTable)
|
|
4958
|
-
.where(eq(schemasTable.name, this.schemaName))
|
|
4959
|
-
.limit(1);
|
|
4960
5726
|
if (dbSchema.length > 0) {
|
|
4961
5727
|
await db
|
|
4962
|
-
.update(
|
|
5728
|
+
.update(schemas)
|
|
4963
5729
|
.set({
|
|
4964
5730
|
isDraft: false,
|
|
4965
5731
|
isEdited: false, // Clear isEdited flag after saving to file
|
|
@@ -4968,7 +5734,7 @@ class Schema {
|
|
|
4968
5734
|
version: publishedSchema.version,
|
|
4969
5735
|
updatedAt: new Date(publishedSchema.metadata.updatedAt).getTime(),
|
|
4970
5736
|
})
|
|
4971
|
-
.where(eq(
|
|
5737
|
+
.where(eq(schemas.id, dbSchema[0].id));
|
|
4972
5738
|
}
|
|
4973
5739
|
else {
|
|
4974
5740
|
// Create new record if it doesn't exist (shouldn't happen, but safety)
|
|
@@ -4987,7 +5753,6 @@ class Schema {
|
|
|
4987
5753
|
_dbVersion: publishedSchema.version,
|
|
4988
5754
|
});
|
|
4989
5755
|
// Clear edited flags on all ModelProperty instances and in database
|
|
4990
|
-
const { properties: propertiesTable, models: modelsTable } = await import('./index-r45w9hEq.js').then(function (n) { return n.s; });
|
|
4991
5756
|
for (const propertyKey of context._editedProperties) {
|
|
4992
5757
|
const [modelName, propertyName] = propertyKey.split(':');
|
|
4993
5758
|
const cacheKey = `${modelName}:${propertyName}`;
|
|
@@ -5005,23 +5770,23 @@ class Schema {
|
|
|
5005
5770
|
if (db && modelName && propertyName) {
|
|
5006
5771
|
// Find model by name
|
|
5007
5772
|
const modelRecords = await db
|
|
5008
|
-
.select({ id:
|
|
5009
|
-
.from(
|
|
5010
|
-
.where(eq(
|
|
5773
|
+
.select({ id: models.id })
|
|
5774
|
+
.from(models)
|
|
5775
|
+
.where(eq(models.name, modelName))
|
|
5011
5776
|
.limit(1);
|
|
5012
5777
|
if (modelRecords.length > 0) {
|
|
5013
5778
|
// Find property by name and modelId
|
|
5014
5779
|
const propertyRecords = await db
|
|
5015
|
-
.select({ id:
|
|
5016
|
-
.from(
|
|
5017
|
-
.where(and(eq(
|
|
5780
|
+
.select({ id: properties.id })
|
|
5781
|
+
.from(properties)
|
|
5782
|
+
.where(and(eq(properties.name, propertyName), eq(properties.modelId, modelRecords[0].id)))
|
|
5018
5783
|
.limit(1);
|
|
5019
5784
|
if (propertyRecords.length > 0) {
|
|
5020
5785
|
// Clear isEdited flag in database
|
|
5021
5786
|
await db
|
|
5022
|
-
.update(
|
|
5787
|
+
.update(properties)
|
|
5023
5788
|
.set({ isEdited: false })
|
|
5024
|
-
.where(eq(
|
|
5789
|
+
.where(eq(properties.id, propertyRecords[0].id));
|
|
5025
5790
|
}
|
|
5026
5791
|
}
|
|
5027
5792
|
}
|
|
@@ -5035,19 +5800,18 @@ class Schema {
|
|
|
5035
5800
|
try {
|
|
5036
5801
|
if (db && context._dbId) {
|
|
5037
5802
|
// Get all models for this schema
|
|
5038
|
-
const { modelSchemas, models: modelsTable } = await import('./index-r45w9hEq.js').then(function (n) { return n.s; });
|
|
5039
5803
|
const modelRecords = await db
|
|
5040
|
-
.select({ id:
|
|
5804
|
+
.select({ id: models.id })
|
|
5041
5805
|
.from(modelSchemas)
|
|
5042
|
-
.innerJoin(
|
|
5806
|
+
.innerJoin(models, eq(modelSchemas.modelId, models.id))
|
|
5043
5807
|
.where(eq(modelSchemas.schemaId, context._dbId));
|
|
5044
5808
|
// Clear isEdited flag for all models
|
|
5045
5809
|
for (const modelRecord of modelRecords) {
|
|
5046
5810
|
if (modelRecord.id) {
|
|
5047
5811
|
await db
|
|
5048
|
-
.update(
|
|
5812
|
+
.update(models)
|
|
5049
5813
|
.set({ isEdited: false })
|
|
5050
|
-
.where(eq(
|
|
5814
|
+
.where(eq(models.id, modelRecord.id));
|
|
5051
5815
|
}
|
|
5052
5816
|
}
|
|
5053
5817
|
}
|
|
@@ -5150,9 +5914,6 @@ class Schema {
|
|
|
5150
5914
|
saveDraftLogger(`Client check: cacheIsStale=${cacheIsStale}, shouldRecheck=${shouldRecheck}, cachedValue=${cachedClientInitialized}, timeSinceCheck=${now - clientCheckTime}ms`);
|
|
5151
5915
|
if (shouldRecheck) {
|
|
5152
5916
|
try {
|
|
5153
|
-
// Use dynamic import for browser compatibility (require() doesn't work in browsers)
|
|
5154
|
-
const { getClient } = await import('./json-I3vJhXo8.js').then(function (n) { return n.aV; });
|
|
5155
|
-
const { ClientManagerState } = await import('./json-I3vJhXo8.js').then(function (n) { return n.aM; });
|
|
5156
5917
|
const client = getClient();
|
|
5157
5918
|
const clientSnapshot = client.getService().getSnapshot();
|
|
5158
5919
|
// Check if state is IDLE (primary check) - isInitialized is set in entry action so should be true
|
|
@@ -5223,10 +5984,6 @@ class Schema {
|
|
|
5223
5984
|
_editedProperties: new Set(),
|
|
5224
5985
|
};
|
|
5225
5986
|
}
|
|
5226
|
-
const { addSchemaToDb } = await import('./json-I3vJhXo8.js').then(function (n) { return n.aR; });
|
|
5227
|
-
const { generateId } = await import('./json-I3vJhXo8.js').then(function (n) { return n.aL; });
|
|
5228
|
-
const { BaseDb } = await import('./json-I3vJhXo8.js').then(function (n) { return n.aI; });
|
|
5229
|
-
const { schemas: schemasTable } = await import('./json-I3vJhXo8.js').then(function (n) { return n.aN; });
|
|
5230
5987
|
const db = BaseDb.getAppDb();
|
|
5231
5988
|
if (!db) {
|
|
5232
5989
|
throw new Error('Database not found');
|
|
@@ -5246,8 +6003,8 @@ class Schema {
|
|
|
5246
6003
|
logger(`Looking up schema by schemaFileId: ${context.id}`);
|
|
5247
6004
|
const schemasById = await db
|
|
5248
6005
|
.select()
|
|
5249
|
-
.from(
|
|
5250
|
-
.where(eq(
|
|
6006
|
+
.from(schemas)
|
|
6007
|
+
.where(eq(schemas.schemaFileId, context.id)) // id is now the schemaFileId (string)
|
|
5251
6008
|
.limit(1);
|
|
5252
6009
|
if (schemasById.length > 0) {
|
|
5253
6010
|
const foundRecord = schemasById[0];
|
|
@@ -5267,8 +6024,8 @@ class Schema {
|
|
|
5267
6024
|
logger(`Looking up schema by name "${lookupName}" (oldName: ${oldName}, finalNewName: ${finalNewName})`);
|
|
5268
6025
|
const existingSchemas = await db
|
|
5269
6026
|
.select()
|
|
5270
|
-
.from(
|
|
5271
|
-
.where(eq(
|
|
6027
|
+
.from(schemas)
|
|
6028
|
+
.where(eq(schemas.name, lookupName))
|
|
5272
6029
|
.limit(10); // Get multiple to find drafts
|
|
5273
6030
|
logger(`Found ${existingSchemas.length} records with name "${lookupName}"`);
|
|
5274
6031
|
// If name changed, prefer draft records; otherwise prefer any record
|
|
@@ -5358,14 +6115,14 @@ class Schema {
|
|
|
5358
6115
|
saveDraftLogger(`Setting schemaFileId to ${finalSchemaFileId} (was null)`);
|
|
5359
6116
|
}
|
|
5360
6117
|
await db
|
|
5361
|
-
.update(
|
|
6118
|
+
.update(schemas)
|
|
5362
6119
|
.set(updateData)
|
|
5363
|
-
.where(eq(
|
|
6120
|
+
.where(eq(schemas.id, existingSchemaRecord.id));
|
|
5364
6121
|
// Verify what was saved by reading it back
|
|
5365
6122
|
const verifyRecord = await db
|
|
5366
6123
|
.select()
|
|
5367
|
-
.from(
|
|
5368
|
-
.where(eq(
|
|
6124
|
+
.from(schemas)
|
|
6125
|
+
.where(eq(schemas.id, existingSchemaRecord.id))
|
|
5369
6126
|
.limit(1);
|
|
5370
6127
|
if (verifyRecord.length > 0 && verifyRecord[0].schemaData) {
|
|
5371
6128
|
try {
|
|
@@ -5375,15 +6132,15 @@ class Schema {
|
|
|
5375
6132
|
saveDraftLogger(`ERROR: isDraft is not true after save! Expected true, got ${verifyRecord[0].isDraft}. This will cause the schema to load from file instead of database!`);
|
|
5376
6133
|
// Try to fix it immediately
|
|
5377
6134
|
await db
|
|
5378
|
-
.update(
|
|
6135
|
+
.update(schemas)
|
|
5379
6136
|
.set({ isDraft: true })
|
|
5380
|
-
.where(eq(
|
|
6137
|
+
.where(eq(schemas.id, existingSchemaRecord.id));
|
|
5381
6138
|
saveDraftLogger(`Attempted to fix isDraft by setting it to true again`);
|
|
5382
6139
|
// Verify the fix
|
|
5383
6140
|
const fixedRecord = await db
|
|
5384
6141
|
.select()
|
|
5385
|
-
.from(
|
|
5386
|
-
.where(eq(
|
|
6142
|
+
.from(schemas)
|
|
6143
|
+
.where(eq(schemas.id, existingSchemaRecord.id))
|
|
5387
6144
|
.limit(1);
|
|
5388
6145
|
if (fixedRecord.length > 0) {
|
|
5389
6146
|
saveDraftLogger(`After fix attempt: isDraft=${fixedRecord[0].isDraft}`);
|
|
@@ -5425,8 +6182,8 @@ class Schema {
|
|
|
5425
6182
|
// Try to find by looking for ANY draft with the old name (even if it doesn't match exactly)
|
|
5426
6183
|
const allDrafts = await db
|
|
5427
6184
|
.select()
|
|
5428
|
-
.from(
|
|
5429
|
-
.where(eq(
|
|
6185
|
+
.from(schemas)
|
|
6186
|
+
.where(eq(schemas.name, oldName))
|
|
5430
6187
|
.limit(1);
|
|
5431
6188
|
if (allDrafts.length > 0 && allDrafts[0].id) {
|
|
5432
6189
|
const foundRecord = allDrafts[0];
|
|
@@ -5434,7 +6191,7 @@ class Schema {
|
|
|
5434
6191
|
// Update the existing record with the new name
|
|
5435
6192
|
// CRITICAL: Ensure schemaFileId matches schema.id
|
|
5436
6193
|
await db
|
|
5437
|
-
.update(
|
|
6194
|
+
.update(schemas)
|
|
5438
6195
|
.set({
|
|
5439
6196
|
name: finalNewName,
|
|
5440
6197
|
schemaData: JSON.stringify(currentSchema, null, 2),
|
|
@@ -5443,7 +6200,7 @@ class Schema {
|
|
|
5443
6200
|
updatedAt: new Date(currentSchema.metadata.updatedAt).getTime(),
|
|
5444
6201
|
isDraft: true, // Ensure it's marked as a draft when saving via _saveDraftToDb
|
|
5445
6202
|
})
|
|
5446
|
-
.where(eq(
|
|
6203
|
+
.where(eq(schemas.id, foundRecord.id));
|
|
5447
6204
|
// Update context with id (schemaFileId) and conflict detection metadata
|
|
5448
6205
|
try {
|
|
5449
6206
|
const snapshot = this._service.getSnapshot();
|
|
@@ -5554,6 +6311,68 @@ class Schema {
|
|
|
5554
6311
|
}
|
|
5555
6312
|
}
|
|
5556
6313
|
}
|
|
6314
|
+
/**
|
|
6315
|
+
* Destroy the schema instance completely: remove from caches, delete from database (cascade),
|
|
6316
|
+
* and stop the service. Uses shared destroy helpers.
|
|
6317
|
+
*/
|
|
6318
|
+
async destroy() {
|
|
6319
|
+
const context = this._getSnapshotContext();
|
|
6320
|
+
const schemaFileId = context.id;
|
|
6321
|
+
const schemaName = context.schemaName;
|
|
6322
|
+
clearDestroySubscriptions(this, {
|
|
6323
|
+
instanceState: schemaInstanceState,
|
|
6324
|
+
onUnload: () => schemaInstanceState.delete(this),
|
|
6325
|
+
});
|
|
6326
|
+
const cacheKeys = [];
|
|
6327
|
+
if (schemaFileId)
|
|
6328
|
+
cacheKeys.push(schemaFileId);
|
|
6329
|
+
if (schemaName)
|
|
6330
|
+
cacheKeys.push(schemaName);
|
|
6331
|
+
forceRemoveFromCaches(this, {
|
|
6332
|
+
getCacheKeys: () => cacheKeys,
|
|
6333
|
+
caches: [
|
|
6334
|
+
Schema.instanceCacheById,
|
|
6335
|
+
Schema.instanceCacheByName,
|
|
6336
|
+
],
|
|
6337
|
+
});
|
|
6338
|
+
await runDestroyLifecycle(this, {
|
|
6339
|
+
getService: (instance) => instance._service,
|
|
6340
|
+
doDestroy: async () => {
|
|
6341
|
+
const db = BaseDb.getAppDb();
|
|
6342
|
+
if (!db || !schemaFileId)
|
|
6343
|
+
return;
|
|
6344
|
+
const schemaRecords = await db
|
|
6345
|
+
.select({ id: schemas.id })
|
|
6346
|
+
.from(schemas)
|
|
6347
|
+
.where(eq(schemas.schemaFileId, schemaFileId));
|
|
6348
|
+
if (schemaRecords.length === 0)
|
|
6349
|
+
return;
|
|
6350
|
+
const schemaIds = schemaRecords
|
|
6351
|
+
.map((r) => r.id)
|
|
6352
|
+
.filter((id) => id != null);
|
|
6353
|
+
if (schemaIds.length === 0)
|
|
6354
|
+
return;
|
|
6355
|
+
const { inArray } = await import('drizzle-orm');
|
|
6356
|
+
const joinRows = await db
|
|
6357
|
+
.select({ modelId: modelSchemas.modelId })
|
|
6358
|
+
.from(modelSchemas)
|
|
6359
|
+
.where(inArray(modelSchemas.schemaId, schemaIds));
|
|
6360
|
+
const modelIds = [
|
|
6361
|
+
...new Set(joinRows
|
|
6362
|
+
.map((r) => r.modelId)
|
|
6363
|
+
.filter((id) => id != null)),
|
|
6364
|
+
];
|
|
6365
|
+
await db.delete(modelSchemas).where(inArray(modelSchemas.schemaId, schemaIds));
|
|
6366
|
+
for (const modelId of modelIds) {
|
|
6367
|
+
await db.delete(properties).where(eq(properties.modelId, modelId));
|
|
6368
|
+
}
|
|
6369
|
+
for (const modelId of modelIds) {
|
|
6370
|
+
await db.delete(models).where(eq(models.id, modelId));
|
|
6371
|
+
}
|
|
6372
|
+
await db.delete(schemas).where(eq(schemas.schemaFileId, schemaFileId));
|
|
6373
|
+
},
|
|
6374
|
+
});
|
|
6375
|
+
}
|
|
5557
6376
|
/**
|
|
5558
6377
|
* Set up liveQuery subscription to watch for model changes in the database
|
|
5559
6378
|
* This enables cross-instance synchronization (e.g., changes in other tabs/windows)
|
|
@@ -5565,9 +6384,6 @@ class Schema {
|
|
|
5565
6384
|
}
|
|
5566
6385
|
setupEntityLiveQuery(this, {
|
|
5567
6386
|
getEntityId: async (schema) => {
|
|
5568
|
-
const { BaseDb } = await import('./json-I3vJhXo8.js').then(function (n) { return n.aI; });
|
|
5569
|
-
const { schemas: schemasTable } = await import('./index-r45w9hEq.js').then(function (n) { return n.s; });
|
|
5570
|
-
const { eq } = await import('drizzle-orm');
|
|
5571
6387
|
const db = BaseDb.getAppDb();
|
|
5572
6388
|
if (!db) {
|
|
5573
6389
|
return undefined;
|
|
@@ -5579,8 +6395,8 @@ class Schema {
|
|
|
5579
6395
|
}
|
|
5580
6396
|
const schemaRecords = await db
|
|
5581
6397
|
.select()
|
|
5582
|
-
.from(
|
|
5583
|
-
.where(eq(
|
|
6398
|
+
.from(schemas)
|
|
6399
|
+
.where(eq(schemas.name, schemaName))
|
|
5584
6400
|
.limit(1);
|
|
5585
6401
|
if (schemaRecords.length === 0 || !schemaRecords[0].id) {
|
|
5586
6402
|
return undefined;
|
|
@@ -5588,9 +6404,6 @@ class Schema {
|
|
|
5588
6404
|
return schemaRecords[0].id;
|
|
5589
6405
|
},
|
|
5590
6406
|
buildQuery: async (schemaId) => {
|
|
5591
|
-
const { BaseDb } = await import('./json-I3vJhXo8.js').then(function (n) { return n.aI; });
|
|
5592
|
-
const { modelSchemas, models: modelsTable } = await import('./index-r45w9hEq.js').then(function (n) { return n.s; });
|
|
5593
|
-
const { eq } = await import('drizzle-orm');
|
|
5594
6407
|
const db = BaseDb.getAppDb();
|
|
5595
6408
|
if (!db) {
|
|
5596
6409
|
throw new Error('Database not available');
|
|
@@ -5598,11 +6411,11 @@ class Schema {
|
|
|
5598
6411
|
return BaseDb.liveQuery(db
|
|
5599
6412
|
.select({
|
|
5600
6413
|
modelId: modelSchemas.modelId,
|
|
5601
|
-
modelName:
|
|
5602
|
-
modelFileId:
|
|
6414
|
+
modelName: models.name,
|
|
6415
|
+
modelFileId: models.schemaFileId,
|
|
5603
6416
|
})
|
|
5604
6417
|
.from(modelSchemas)
|
|
5605
|
-
.innerJoin(
|
|
6418
|
+
.innerJoin(models, eq(modelSchemas.modelId, models.id))
|
|
5606
6419
|
.where(eq(modelSchemas.schemaId, typeof schemaId === 'string' ? parseInt(schemaId, 10) : schemaId)));
|
|
5607
6420
|
},
|
|
5608
6421
|
extractEntityIds: (rows) => rows.map(row => row.modelFileId).filter(Boolean),
|
|
@@ -5662,15 +6475,11 @@ class Schema {
|
|
|
5662
6475
|
}
|
|
5663
6476
|
},
|
|
5664
6477
|
createChildInstances: async (ids) => {
|
|
5665
|
-
const { Model } = await import('./json-I3vJhXo8.js').then(function (n) { return n.aS; });
|
|
5666
6478
|
for (const id of ids) {
|
|
5667
6479
|
await Model.createById(id);
|
|
5668
6480
|
}
|
|
5669
6481
|
},
|
|
5670
6482
|
queryInitialData: async (schemaId) => {
|
|
5671
|
-
const { BaseDb } = await import('./json-I3vJhXo8.js').then(function (n) { return n.aI; });
|
|
5672
|
-
const { modelSchemas, models: modelsTable } = await import('./index-r45w9hEq.js').then(function (n) { return n.s; });
|
|
5673
|
-
const { eq } = await import('drizzle-orm');
|
|
5674
6483
|
const db = BaseDb.getAppDb();
|
|
5675
6484
|
if (!db) {
|
|
5676
6485
|
return [];
|
|
@@ -5681,11 +6490,11 @@ class Schema {
|
|
|
5681
6490
|
const initialModels = await db
|
|
5682
6491
|
.select({
|
|
5683
6492
|
modelId: modelSchemas.modelId,
|
|
5684
|
-
modelName:
|
|
5685
|
-
modelFileId:
|
|
6493
|
+
modelName: models.name,
|
|
6494
|
+
modelFileId: models.schemaFileId,
|
|
5686
6495
|
})
|
|
5687
6496
|
.from(modelSchemas)
|
|
5688
|
-
.innerJoin(
|
|
6497
|
+
.innerJoin(models, eq(modelSchemas.modelId, models.id))
|
|
5689
6498
|
.where(eq(modelSchemas.schemaId, typeof schemaId === 'string' ? parseInt(schemaId, 10) : schemaId));
|
|
5690
6499
|
logger(`[Schema._setupLiveQuerySubscription] Initial query found ${initialModels.length} models`);
|
|
5691
6500
|
if (initialModels.length > 0) {
|
|
@@ -5741,5 +6550,5 @@ var Schema$1 = /*#__PURE__*/Object.freeze({
|
|
|
5741
6550
|
schemaInstanceState: schemaInstanceState
|
|
5742
6551
|
});
|
|
5743
6552
|
|
|
5744
|
-
export { Schema,
|
|
5745
|
-
//# sourceMappingURL=Schema-
|
|
6553
|
+
export { Schema as S, deleteModelFromSchema as a, Schema$1 as b, convertPropertyToSchemaUpdate as c, deletePropertyFromModel as d, renameModelProperty as r, updateModelProperties as u };
|
|
6554
|
+
//# sourceMappingURL=Schema-D1eqDHyt.js.map
|