@dnax/core 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +15 -0
- package/ai/gemini.ts +97 -0
- package/ai/index.ts +2 -0
- package/ai/mistral.ts +63 -0
- package/app/ctrl.ts +26 -0
- package/app/hono.ts +398 -0
- package/app/index.ts +72 -0
- package/config/index.ts +68 -0
- package/define/index.ts +35 -0
- package/driver/index.ts +19 -0
- package/driver/mongo/@types.ts +44 -0
- package/driver/mongo/connect.ts +26 -0
- package/driver/mongo/index.ts +4 -0
- package/driver/mongo/rest.ts +1214 -0
- package/driver/mongo/utils.ts +361 -0
- package/index.ts +11 -0
- package/lib/asyncLocalStorage.ts +47 -0
- package/lib/collection.ts +191 -0
- package/lib/endpoint.ts +36 -0
- package/lib/index.ts +26 -0
- package/lib/media.ts +74 -0
- package/lib/schema.ts +112 -0
- package/lib/service.ts +43 -0
- package/lib/socket.ts +51 -0
- package/lib/studio.ts +12 -0
- package/lib/tenant.ts +9 -0
- package/package.json +38 -0
- package/tsconfig.json +27 -0
- package/types/index.ts +377 -0
- package/utils/index.ts +251 -0
|
@@ -0,0 +1,361 @@
|
|
|
1
|
+
import type { findParam } from "./@types";
|
|
2
|
+
import type { Actions, Collection } from "../../types";
|
|
3
|
+
import { ObjectId } from "mongodb";
|
|
4
|
+
import { mapKeys, omit } from "radash";
|
|
5
|
+
import { getCollection, getFieldCollection } from "../../lib/collection";
|
|
6
|
+
import { isDate } from "../../utils";
|
|
7
|
+
import generateUniqueId from "generate-unique-id";
|
|
8
|
+
import { Cfg } from "../../config";
|
|
9
|
+
function buildPipeline(params: findParam, col?: Collection | undefined | null) {
|
|
10
|
+
let pipeline = [];
|
|
11
|
+
|
|
12
|
+
// $match
|
|
13
|
+
if (params?.$match) {
|
|
14
|
+
pipeline.push({
|
|
15
|
+
$match: params.$match,
|
|
16
|
+
});
|
|
17
|
+
}
|
|
18
|
+
|
|
19
|
+
// $skip
|
|
20
|
+
if (params?.$skip) {
|
|
21
|
+
pipeline.push({
|
|
22
|
+
$skip: Number(params?.$skip) || 0,
|
|
23
|
+
});
|
|
24
|
+
}
|
|
25
|
+
|
|
26
|
+
// set Limit
|
|
27
|
+
if (params?.$limit !== 0 && params?.$limit !== -1) {
|
|
28
|
+
pipeline.push({
|
|
29
|
+
$limit:
|
|
30
|
+
params?.$limit ||
|
|
31
|
+
Number(process.env?.QUERY_LIMIT) ||
|
|
32
|
+
Number(Cfg?.server?.query_limit) ||
|
|
33
|
+
100,
|
|
34
|
+
});
|
|
35
|
+
}
|
|
36
|
+
|
|
37
|
+
// include
|
|
38
|
+
|
|
39
|
+
if (params?.$include?.length) {
|
|
40
|
+
params.$include.map((inc) => {
|
|
41
|
+
// AutoInclue by string
|
|
42
|
+
|
|
43
|
+
if (typeof inc === "string" && col) {
|
|
44
|
+
let field = getFieldCollection(inc, col);
|
|
45
|
+
if (field) {
|
|
46
|
+
pipeline.push({
|
|
47
|
+
$lookup: {
|
|
48
|
+
from: field.relationTo,
|
|
49
|
+
localField: inc,
|
|
50
|
+
foreignField: "_id",
|
|
51
|
+
as: inc,
|
|
52
|
+
},
|
|
53
|
+
});
|
|
54
|
+
if (field.relationType == "ref-to-one") {
|
|
55
|
+
pipeline.push({
|
|
56
|
+
$unwind: {
|
|
57
|
+
path: `$${inc}`,
|
|
58
|
+
preserveNullAndEmptyArrays: true,
|
|
59
|
+
},
|
|
60
|
+
});
|
|
61
|
+
}
|
|
62
|
+
}
|
|
63
|
+
}
|
|
64
|
+
|
|
65
|
+
if (typeof inc === "object") {
|
|
66
|
+
let field = getFieldCollection(inc?.localField, col);
|
|
67
|
+
pipeline.push({
|
|
68
|
+
$lookup: {
|
|
69
|
+
from: inc?.from || field?.relationTo,
|
|
70
|
+
localField: inc?.localField || field?.name,
|
|
71
|
+
foreignField: inc?.foreignField || "_id",
|
|
72
|
+
as: inc?.as || inc?.localField || field?.name,
|
|
73
|
+
pipeline: inc?.pipeline || [],
|
|
74
|
+
},
|
|
75
|
+
});
|
|
76
|
+
|
|
77
|
+
if (field?.relationType == "ref-to-one") {
|
|
78
|
+
pipeline.push({
|
|
79
|
+
$unwind: {
|
|
80
|
+
path: `$${inc?.as || inc?.localField || field?.name}`,
|
|
81
|
+
preserveNullAndEmptyArrays: true,
|
|
82
|
+
},
|
|
83
|
+
});
|
|
84
|
+
}
|
|
85
|
+
|
|
86
|
+
if (inc?.unwind) {
|
|
87
|
+
pipeline.push({
|
|
88
|
+
$unwind: {
|
|
89
|
+
path: `$${inc?.as || inc?.localField || field?.name}`,
|
|
90
|
+
preserveNullAndEmptyArrays: true,
|
|
91
|
+
},
|
|
92
|
+
});
|
|
93
|
+
}
|
|
94
|
+
}
|
|
95
|
+
});
|
|
96
|
+
}
|
|
97
|
+
|
|
98
|
+
// $matchInclude
|
|
99
|
+
if (params?.$matchInclude) {
|
|
100
|
+
pipeline.push({
|
|
101
|
+
$match: params.$matchInclude,
|
|
102
|
+
});
|
|
103
|
+
}
|
|
104
|
+
|
|
105
|
+
// project
|
|
106
|
+
if (params.$project) {
|
|
107
|
+
pipeline.push({
|
|
108
|
+
$project: params.$project,
|
|
109
|
+
});
|
|
110
|
+
}
|
|
111
|
+
|
|
112
|
+
// sorted
|
|
113
|
+
if (!params?.$sort?.createdAt) {
|
|
114
|
+
pipeline.push({
|
|
115
|
+
$sort: {
|
|
116
|
+
...(params?.$sort || {}),
|
|
117
|
+
createdAt: -1,
|
|
118
|
+
},
|
|
119
|
+
});
|
|
120
|
+
}
|
|
121
|
+
|
|
122
|
+
// sample aleatoire
|
|
123
|
+
if (params?.$sample) {
|
|
124
|
+
pipeline.push({
|
|
125
|
+
$sample: params.$sample,
|
|
126
|
+
});
|
|
127
|
+
}
|
|
128
|
+
return pipeline || [];
|
|
129
|
+
}
|
|
130
|
+
|
|
131
|
+
function transformAllDate(data: object) {
|
|
132
|
+
try {
|
|
133
|
+
if (typeof data == "object" && data !== null && data !== undefined) {
|
|
134
|
+
mapKeys(data, (key, value) => {
|
|
135
|
+
if (typeof value == "string" && value && isDate(value)) {
|
|
136
|
+
data[key] = new Date(value);
|
|
137
|
+
} else {
|
|
138
|
+
return transformAllDate(data[key]);
|
|
139
|
+
}
|
|
140
|
+
});
|
|
141
|
+
}
|
|
142
|
+
|
|
143
|
+
//console.log(data);
|
|
144
|
+
return data;
|
|
145
|
+
} catch (error) {
|
|
146
|
+
console.log(error);
|
|
147
|
+
|
|
148
|
+
throw error;
|
|
149
|
+
}
|
|
150
|
+
}
|
|
151
|
+
|
|
152
|
+
function toBson<T>(
|
|
153
|
+
data: object | T,
|
|
154
|
+
options?: {
|
|
155
|
+
action?:
|
|
156
|
+
| "insertOne"
|
|
157
|
+
| "insertMany"
|
|
158
|
+
| "updateOne"
|
|
159
|
+
| "updateMany"
|
|
160
|
+
| "deleteOne"
|
|
161
|
+
| "deleteMany"
|
|
162
|
+
| "find"
|
|
163
|
+
| "findOne"
|
|
164
|
+
| "aggregate";
|
|
165
|
+
}
|
|
166
|
+
): T {
|
|
167
|
+
if (data) {
|
|
168
|
+
// InsertOne
|
|
169
|
+
if (options?.action == "insertOne") {
|
|
170
|
+
data.createdAt = new Date();
|
|
171
|
+
data.updatedAt = new Date();
|
|
172
|
+
}
|
|
173
|
+
|
|
174
|
+
// insertMany
|
|
175
|
+
if (options?.action == "insertMany" && Array.isArray(data)) {
|
|
176
|
+
data?.map((d) => {
|
|
177
|
+
d.createdAt = new Date();
|
|
178
|
+
d.updatedAt = new Date();
|
|
179
|
+
});
|
|
180
|
+
}
|
|
181
|
+
|
|
182
|
+
// updateOne
|
|
183
|
+
if (options?.action == "updateOne") {
|
|
184
|
+
data.updateAt = new Date();
|
|
185
|
+
}
|
|
186
|
+
|
|
187
|
+
// updateMany
|
|
188
|
+
if (options?.action == "updateMany") {
|
|
189
|
+
data?.map((d) => {
|
|
190
|
+
d.updatedAt = new Date();
|
|
191
|
+
});
|
|
192
|
+
}
|
|
193
|
+
|
|
194
|
+
// start Transformation to Bson
|
|
195
|
+
if (typeof data == "string" && ObjectId.isValid(data)) {
|
|
196
|
+
data = new ObjectId(data);
|
|
197
|
+
}
|
|
198
|
+
|
|
199
|
+
if (typeof data == "string" && isDate(data)) {
|
|
200
|
+
if (isDate(data)) data = new Date(data);
|
|
201
|
+
}
|
|
202
|
+
|
|
203
|
+
// for object
|
|
204
|
+
if (typeof data == "object" && !Array.isArray(data)) {
|
|
205
|
+
mapKeys(data, (key, value) => {
|
|
206
|
+
data[key] = toBson(value);
|
|
207
|
+
});
|
|
208
|
+
}
|
|
209
|
+
|
|
210
|
+
// for array
|
|
211
|
+
if (typeof data == "object" && Array.isArray(data)) {
|
|
212
|
+
mapKeys(data, (key, value) => {
|
|
213
|
+
data[key] = toBson(value);
|
|
214
|
+
});
|
|
215
|
+
}
|
|
216
|
+
}
|
|
217
|
+
return data;
|
|
218
|
+
}
|
|
219
|
+
|
|
220
|
+
function deepSetId(col: Collection, data: any) {
|
|
221
|
+
if (col) {
|
|
222
|
+
col?.fields?.map((f) => {
|
|
223
|
+
if (f?.type == "array" && !data[f?.name]) {
|
|
224
|
+
data[f.name] = [];
|
|
225
|
+
}
|
|
226
|
+
|
|
227
|
+
if (f?.type == "relationship" && f?.relationType == "ref-to-many") {
|
|
228
|
+
data[f.name] = [];
|
|
229
|
+
}
|
|
230
|
+
|
|
231
|
+
if (f?.type == "relationship" && data[f.name]) {
|
|
232
|
+
if (typeof data[f.name] == "object" && !Array.isArray(data[f.name])) {
|
|
233
|
+
data[f.name] = data[f.name]?._id || data[f.name]?.id || data[f.name];
|
|
234
|
+
}
|
|
235
|
+
|
|
236
|
+
if (Array.isArray(data[f.name])) {
|
|
237
|
+
data[f.name].map((d, index) => {
|
|
238
|
+
d = d?._id || d?.id || d;
|
|
239
|
+
data[f.name][index] = d;
|
|
240
|
+
});
|
|
241
|
+
}
|
|
242
|
+
}
|
|
243
|
+
});
|
|
244
|
+
}
|
|
245
|
+
return data;
|
|
246
|
+
}
|
|
247
|
+
|
|
248
|
+
async function hashPasswordAuto(data: any, col: Collection) {
|
|
249
|
+
if (col) {
|
|
250
|
+
for await (let f of col?.fields || []) {
|
|
251
|
+
if (f.type == "password" && data[f?.name]) {
|
|
252
|
+
data[f.name] = await Bun.password.hash(data[f?.name]);
|
|
253
|
+
}
|
|
254
|
+
}
|
|
255
|
+
}
|
|
256
|
+
|
|
257
|
+
return data;
|
|
258
|
+
}
|
|
259
|
+
|
|
260
|
+
async function cleanData(data: any, col: Collection) {
|
|
261
|
+
data = omit(data, ["_id", "id", "createdAt", "updatedAt"]);
|
|
262
|
+
|
|
263
|
+
col.fields?.map((f) => {
|
|
264
|
+
if (f.random && data[f.name]) {
|
|
265
|
+
try {
|
|
266
|
+
delete data[f.name];
|
|
267
|
+
} catch (e) {}
|
|
268
|
+
}
|
|
269
|
+
});
|
|
270
|
+
|
|
271
|
+
return data;
|
|
272
|
+
}
|
|
273
|
+
|
|
274
|
+
async function randomCode(
|
|
275
|
+
data: any,
|
|
276
|
+
col: Collection,
|
|
277
|
+
action?: Actions
|
|
278
|
+
): Promise<any> {
|
|
279
|
+
var code = null;
|
|
280
|
+
for await (let f of col?.fields || []) {
|
|
281
|
+
if (f?.random) {
|
|
282
|
+
code = generateUniqueId({
|
|
283
|
+
length: f?.random?.length || 6,
|
|
284
|
+
useLetters: f?.random?.useLetters ?? false,
|
|
285
|
+
useNumbers: f?.random?.useNumbers ?? true,
|
|
286
|
+
includeSymbols: f?.random?.includeSymbols || [],
|
|
287
|
+
excludeSymbols: f?.random?.excludeSymbols || [],
|
|
288
|
+
});
|
|
289
|
+
|
|
290
|
+
if (f?.random?.startWith) {
|
|
291
|
+
code = f?.random?.startWith + code;
|
|
292
|
+
}
|
|
293
|
+
|
|
294
|
+
if (f?.random?.endWith) {
|
|
295
|
+
code = code + f?.random?.endWith;
|
|
296
|
+
}
|
|
297
|
+
|
|
298
|
+
if (f?.random?.toLowerCase) {
|
|
299
|
+
code = code?.toLowerCase();
|
|
300
|
+
}
|
|
301
|
+
if (f?.random?.toUpperCase) {
|
|
302
|
+
code = code?.toUpperCase();
|
|
303
|
+
}
|
|
304
|
+
|
|
305
|
+
if (f?.random?.toNumber) {
|
|
306
|
+
code = Number(code);
|
|
307
|
+
}
|
|
308
|
+
|
|
309
|
+
data[f.name] = code;
|
|
310
|
+
// if unique
|
|
311
|
+
if (f?.unique) {
|
|
312
|
+
let tenant = Cfg.tenants.find((t) => t.id == col.tenant_id);
|
|
313
|
+
|
|
314
|
+
if (tenant) {
|
|
315
|
+
let doc = await tenant.database.db
|
|
316
|
+
?.collection(col.slug)
|
|
317
|
+
.findOne({ [f.name]: code });
|
|
318
|
+
|
|
319
|
+
if (doc) {
|
|
320
|
+
code = await randomCode(data, col);
|
|
321
|
+
data[f.name] = code;
|
|
322
|
+
}
|
|
323
|
+
}
|
|
324
|
+
}
|
|
325
|
+
}
|
|
326
|
+
}
|
|
327
|
+
return data;
|
|
328
|
+
}
|
|
329
|
+
|
|
330
|
+
function formatData(
|
|
331
|
+
data: any,
|
|
332
|
+
options: {
|
|
333
|
+
collection?: string;
|
|
334
|
+
action?: Actions;
|
|
335
|
+
tenant_id: string;
|
|
336
|
+
} = {
|
|
337
|
+
tenant_id: "_",
|
|
338
|
+
collection: "",
|
|
339
|
+
}
|
|
340
|
+
) {
|
|
341
|
+
let col = getCollection(options.collection, options?.tenant_id);
|
|
342
|
+
if (col) {
|
|
343
|
+
data = deepSetId(col, data);
|
|
344
|
+
}
|
|
345
|
+
data = toBson(data, {
|
|
346
|
+
action: options?.action,
|
|
347
|
+
});
|
|
348
|
+
|
|
349
|
+
return data;
|
|
350
|
+
}
|
|
351
|
+
|
|
352
|
+
export {
|
|
353
|
+
buildPipeline,
|
|
354
|
+
deepSetId,
|
|
355
|
+
toBson,
|
|
356
|
+
formatData,
|
|
357
|
+
randomCode,
|
|
358
|
+
cleanData,
|
|
359
|
+
hashPasswordAuto,
|
|
360
|
+
transformAllDate,
|
|
361
|
+
};
|
package/index.ts
ADDED
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
import "@colors/colors";
|
|
2
|
+
import { MongoClient, useRest } from "./driver/mongo";
|
|
3
|
+
import { toBson } from "./driver/mongo/utils";
|
|
4
|
+
import { runApp } from "./app";
|
|
5
|
+
import define from "./define";
|
|
6
|
+
import moment from "moment";
|
|
7
|
+
import * as utils from "./utils";
|
|
8
|
+
import * as v from "valibot";
|
|
9
|
+
import * as ai from "./ai";
|
|
10
|
+
|
|
11
|
+
export { runApp, define, v, utils, useRest, ai };
|
|
@@ -0,0 +1,47 @@
|
|
|
1
|
+
import { AsyncLocalStorage } from "node:async_hooks";
|
|
2
|
+
import type { sessionCtx } from "../types";
|
|
3
|
+
const asyncLocalStorage = new AsyncLocalStorage();
|
|
4
|
+
const key = "___sessionStorage___";
|
|
5
|
+
const stateKey = "____requestState____";
|
|
6
|
+
const sessionStorage = () => ({
|
|
7
|
+
get(): { state: object; _v: object; token: string } {
|
|
8
|
+
let store = asyncLocalStorage.getStore() as InstanceType<typeof Map>;
|
|
9
|
+
return (
|
|
10
|
+
(store.get(key) as { state: object; _v: object; token: string }) || {
|
|
11
|
+
state: {},
|
|
12
|
+
_v: {},
|
|
13
|
+
token: "",
|
|
14
|
+
}
|
|
15
|
+
);
|
|
16
|
+
},
|
|
17
|
+
set(
|
|
18
|
+
input: { state: object; _v?: object; token?: string | undefined | null } = {
|
|
19
|
+
state: {},
|
|
20
|
+
_v: {},
|
|
21
|
+
token: null,
|
|
22
|
+
}
|
|
23
|
+
) {
|
|
24
|
+
let store = asyncLocalStorage.getStore() as InstanceType<typeof Map>;
|
|
25
|
+
store.set(key, {
|
|
26
|
+
...this.get(),
|
|
27
|
+
state: {
|
|
28
|
+
...this.get().state,
|
|
29
|
+
...input.state,
|
|
30
|
+
},
|
|
31
|
+
token: input.token || this.get().token,
|
|
32
|
+
});
|
|
33
|
+
},
|
|
34
|
+
});
|
|
35
|
+
|
|
36
|
+
const requestStorage = () => ({
|
|
37
|
+
set(key: string, data: any): any {
|
|
38
|
+
let store = asyncLocalStorage.getStore() as InstanceType<typeof Map>;
|
|
39
|
+
return store.set(key, data);
|
|
40
|
+
},
|
|
41
|
+
get(key: string): any {
|
|
42
|
+
let store = asyncLocalStorage.getStore() as InstanceType<typeof Map>;
|
|
43
|
+
return store.get(key);
|
|
44
|
+
},
|
|
45
|
+
});
|
|
46
|
+
|
|
47
|
+
export { sessionStorage, asyncLocalStorage, requestStorage };
|
|
@@ -0,0 +1,191 @@
|
|
|
1
|
+
import cleanDeep from "clean-deep";
|
|
2
|
+
import { omit } from "radash";
|
|
3
|
+
import { deepEqual } from "json-joy/lib/json-equal/deepEqual";
|
|
4
|
+
import type { Collection, Field } from "./../types/index";
|
|
5
|
+
import { Glob } from "bun";
|
|
6
|
+
import { Cfg } from "../config";
|
|
7
|
+
import { useRest } from "../driver/mongo/rest";
|
|
8
|
+
import { buildSchema } from "./schema";
|
|
9
|
+
import { cleanPath, resolvePath } from "../utils";
|
|
10
|
+
|
|
11
|
+
import path from "path";
|
|
12
|
+
import consola from "consola";
|
|
13
|
+
|
|
14
|
+
async function loadAllCollections() {
|
|
15
|
+
let collections: Collection[] = [];
|
|
16
|
+
if (Cfg.tenants) {
|
|
17
|
+
for await (let t of Cfg.tenants) {
|
|
18
|
+
let tenantPath = `${t.dir}/collections/**/**.model.{ts,js}`;
|
|
19
|
+
const glob = new Glob(tenantPath);
|
|
20
|
+
for await (let file of glob.scan({
|
|
21
|
+
cwd: Cfg.cwd,
|
|
22
|
+
})) {
|
|
23
|
+
let fullPathFile = path.join(Cfg.cwd || "", file);
|
|
24
|
+
await import(fullPathFile)
|
|
25
|
+
.then((inject) => {
|
|
26
|
+
collections.push({
|
|
27
|
+
...inject?.default,
|
|
28
|
+
tenant_id: t.id,
|
|
29
|
+
schema: buildSchema(inject?.default),
|
|
30
|
+
});
|
|
31
|
+
})
|
|
32
|
+
.catch((err) => {
|
|
33
|
+
consola.error(file || "", err?.message);
|
|
34
|
+
});
|
|
35
|
+
}
|
|
36
|
+
}
|
|
37
|
+
}
|
|
38
|
+
|
|
39
|
+
Cfg.collections = collections;
|
|
40
|
+
}
|
|
41
|
+
|
|
42
|
+
function getCollection(
|
|
43
|
+
name: string,
|
|
44
|
+
tenant_id: string | number
|
|
45
|
+
): Collection | undefined | null {
|
|
46
|
+
if (Cfg?.collections?.length) {
|
|
47
|
+
return Cfg.collections.find((col: Collection) => {
|
|
48
|
+
return col.slug === name && col.tenant_id === tenant_id;
|
|
49
|
+
});
|
|
50
|
+
}
|
|
51
|
+
return null;
|
|
52
|
+
}
|
|
53
|
+
|
|
54
|
+
function getFieldCollection(name: string, col: Collection): Field | undefined {
|
|
55
|
+
let field = col?.fields?.find((f) => f.name === name);
|
|
56
|
+
return field;
|
|
57
|
+
}
|
|
58
|
+
|
|
59
|
+
/**
 * Synchronizes every mongodb tenant's database with the loaded collection
 * definitions: runs each collection's `init` hook, creates missing
 * collections, prunes stale indexes, and (re)creates the indexes declared
 * by fields (`unique`/`random`) and by the collection's `indexes` config.
 *
 * Any error aborts the whole sync (caught and logged below); a success
 * message is printed regardless.
 */
async function syncCollectionDatabase() {
  try {
    for await (let t of Cfg.tenants) {
      // existing collections in this tenant's database
      let collectionsInDatabase = await t.database.db
        ?.listCollections()
        .toArray();

      // only the mongodb driver is synchronized here
      if (t.database.driver == "mongodb") {
        const collections: Collection[] | undefined = Cfg.collections?.filter(
          (cn) => cn.tenant_id == t.id
        );

        if (collections?.length) {
          for await (let c of collections) {
            //console.log(c?.slug,c.init)
            // run the collection's init hook with a REST client bound to
            // this tenant
            if (c?.init && typeof c?.init == "function") {
              await c?.init({
                rest: new useRest({
                  tenant_id: t.id,
                }),
                collection: c?.slug,
              });
            }

            let collectionExist = collectionsInDatabase?.find(
              (col) => col?.name == c.slug
            );

            //console.log(c.slug, collectionExist);

            // create the collection if it does not exist yet
            if (!collectionExist) {
              await t.database.db?.createCollection(c.slug);
            }

            var allIndexes = await t.database.db?.collection(c.slug).indexes();

            // prune indexes that no longer correspond to a declared field
            for await (let dbIndex of allIndexes || []) {
              // NOTE(review): `break` stops scanning ALL remaining indexes
              // as soon as a createdAt/updatedAt/_id index is seen — was
              // `continue` (skip just this one) intended?
              if (
                dbIndex?.key?.createdAt ||
                dbIndex?.key?.updatedAt ||
                dbIndex?.key?._id
              ) {
                break;
              }

              // NOTE(review): `map` returns an array, which is always
              // truthy when `c.fields` exists — so `!findexIndexKey` is
              // effectively never true and dropIndex likely never runs.
              // Was `.some()`/`.find()` intended here?
              let findexIndexKey = c.fields?.map(
                (field) => dbIndex.key[field.name]
              );
              if (!findexIndexKey)
                await t.database.db
                  ?.collection(c?.slug)
                  .dropIndex(dbIndex?.name as string);
            }

            //console.log(t.database.db);
            // 1 - create the standard createdAt/updatedAt index
            await t.database.db
              ?.collection(c.slug)
              .createIndex(["createdAt", "updatedAt"])
              .then((e) => {});

            // 2 - create per-field unique sparse indexes for unique/random fields

            c.fields?.map((f) => {
              if (f?.unique || f?.random) {
                t.database.db
                  ?.collection(c.slug)
                  .createIndex(
                    {
                      [f.name]: 1,
                    },
                    {
                      unique: true,
                      sparse: true,
                    }
                  )
                  .catch((err) => {
                    console.log(err);
                  });
              }
            });

            // 3 - create the indexes declared in the collection's `indexes` config

            for await (let index of c?.indexes || []) {
              // drop a pre-existing index with the same key spec so its
              // options (unique/sparse/TTL) can be recreated fresh
              let indexHasAlready = allIndexes?.find((el) => {
                return deepEqual(
                  el.key,
                  omit(index, ["sparse", "unique", "expireAfterSeconds"])
                );
              });

              if (indexHasAlready) {
                await t.database.db
                  ?.collection(c.slug)
                  .dropIndex(indexHasAlready?.name)
                  .catch((err) => {
                    console.log(err?.message);
                  });
              }

              await t.database.db?.collection(c.slug).createIndex(
                {
                  ...omit(index, ["sparse", "unique", "expireAfterSeconds"]),
                },
                //@ts-expect-error
                {
                  ...cleanDeep({
                    unique: index?.unique || false,
                    sparse: index?.sparse || false,
                    expireAfterSeconds: index.expireAfterSeconds ?? null,
                  }),
                }
              );
            }
            // end of per-collection loop
          }
        }
      }
    }
  } catch (err: any) {
    // sync is best-effort: log and fall through to the success message
    console.error(err?.message || err);
  }
  consola.success("Database synchronized ");
}
|
|
185
|
+
|
|
186
|
+
export {
|
|
187
|
+
loadAllCollections,
|
|
188
|
+
getCollection,
|
|
189
|
+
getFieldCollection,
|
|
190
|
+
syncCollectionDatabase,
|
|
191
|
+
};
|
package/lib/endpoint.ts
ADDED
|
@@ -0,0 +1,36 @@
|
|
|
1
|
+
import { omit } from "radash";
|
|
2
|
+
import type { Collection, Endpoint, Field } from "./../types/index";
|
|
3
|
+
import { Glob } from "bun";
|
|
4
|
+
import { Cfg } from "../config";
|
|
5
|
+
import { cleanPath, resolvePath } from "../utils";
|
|
6
|
+
import path from "path";
|
|
7
|
+
import { useRest } from "../driver/mongo/rest";
|
|
8
|
+
|
|
9
|
+
async function loadEndpoints() {
|
|
10
|
+
let endpoints: Endpoint[] = [];
|
|
11
|
+
if (Cfg.tenants) {
|
|
12
|
+
for await (let t of Cfg.tenants) {
|
|
13
|
+
let tenantPath = `${t.dir}/endpoints/**/**.router.{ts,js}`;
|
|
14
|
+
const glob = new Glob(tenantPath);
|
|
15
|
+
for await (let file of glob.scan({
|
|
16
|
+
cwd: Cfg.cwd,
|
|
17
|
+
})) {
|
|
18
|
+
let fullPathFile = path.join(Cfg.cwd || "", file);
|
|
19
|
+
await import(fullPathFile)
|
|
20
|
+
.then((inject) => {
|
|
21
|
+
endpoints.push({
|
|
22
|
+
...inject?.default,
|
|
23
|
+
tenant_id: t.id,
|
|
24
|
+
});
|
|
25
|
+
})
|
|
26
|
+
.catch((err) => {
|
|
27
|
+
console.error(err);
|
|
28
|
+
});
|
|
29
|
+
}
|
|
30
|
+
}
|
|
31
|
+
}
|
|
32
|
+
|
|
33
|
+
Cfg.endpoints = endpoints;
|
|
34
|
+
}
|
|
35
|
+
|
|
36
|
+
export { loadEndpoints };
|
package/lib/index.ts
ADDED
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
import { loadAllCollections, syncCollectionDatabase } from "./collection";
|
|
2
|
+
import { connectTenantsDatabase } from "../driver";
|
|
3
|
+
import { loadEndpoints } from "./endpoint";
|
|
4
|
+
import { loadServices } from "./service";
|
|
5
|
+
import { loadSocket } from "./socket";
|
|
6
|
+
// load all ressource
|
|
7
|
+
async function init(cf = { app: null }) {
|
|
8
|
+
// load all tenants database
|
|
9
|
+
await connectTenantsDatabase();
|
|
10
|
+
// load all collections
|
|
11
|
+
await loadAllCollections();
|
|
12
|
+
|
|
13
|
+
// sync all collections database ( Indexes)
|
|
14
|
+
syncCollectionDatabase();
|
|
15
|
+
|
|
16
|
+
// load Service
|
|
17
|
+
loadServices();
|
|
18
|
+
|
|
19
|
+
// load Socket
|
|
20
|
+
loadSocket();
|
|
21
|
+
|
|
22
|
+
// load all loadEndpoints
|
|
23
|
+
await loadEndpoints();
|
|
24
|
+
}
|
|
25
|
+
|
|
26
|
+
export { init };
|