@ghom/orm 1.7.1 → 1.8.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.github/workflows/test.yml +40 -0
- package/.run/All Tests.run.xml +22 -0
- package/dist/app/backup.js +119 -0
- package/dist/app/caching.js +36 -0
- package/dist/app/orm.js +109 -0
- package/dist/{esm/app → app}/table.js +40 -8
- package/dist/app/util.js +18 -0
- package/dist/index.js +5 -0
- package/package.json +17 -6
- package/readme.md +117 -12
- package/src/app/backup.ts +211 -0
- package/src/app/caching.ts +51 -0
- package/src/app/orm.ts +100 -104
- package/src/app/table.ts +107 -19
- package/src/app/util.ts +30 -0
- package/src/index.ts +3 -0
- package/tests/tables/a.js +24 -21
- package/tests/tables/b.js +27 -24
- package/tests/tables/c.js +15 -12
- package/tests/test.js +120 -43
- package/tsconfig.json +7 -4
- package/dist/cjs/app/orm.js +0 -94
- package/dist/cjs/app/table.js +0 -104
- package/dist/cjs/index.js +0 -18
- package/dist/esm/app/orm.js +0 -67
- package/dist/esm/index.js +0 -2
- package/dist/typings/app/orm.d.ts +0 -47
- package/dist/typings/app/table.d.ts +0 -39
- package/dist/typings/index.d.ts +0 -2
- package/fixup.sh +0 -11
- package/tsconfig-cjs.json +0 -10
- package/tsconfig-esm.json +0 -8
|
@@ -0,0 +1,40 @@
|
|
|
1
|
+
# CI workflow: install, build and test the package on every push/PR to master.

name: Test

# Controls when the action will run.
on:
  # Triggers the workflow on push or pull request events but only for the master branch
  push:
    branches: [ master ]
  pull_request:
    branches: [ master ]

  # Allows you to run this workflow manually from the Actions tab
  workflow_dispatch:

# A workflow run is made up of one or more jobs that can run sequentially or in parallel
jobs:
  # This workflow contains a single job called "test"
  test:
    # The type of runner that the job will run on
    runs-on: ubuntu-latest

    # Steps represent a sequence of tasks that will be executed as part of the job
    steps:
      # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it.
      # v4 of checkout/setup-node: v2/v3 run on action runtimes GitHub has deprecated.
      - uses: actions/checkout@v4

      - name: Setup Node
        uses: actions/setup-node@v4
        with:
          node-version: 22

      - name: Install dependencies
        run: npm install

      - name: Build the source
        run: npm run build

      - name: Start the tests
        run: npm run test
|
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
<!-- JetBrains (WebStorm/IntelliJ) shared run configuration.
     Runs the Jest suite (tests/test.js) with native-ESM support enabled,
     after first building the package through the npm "build" script. -->
<component name="ProjectRunConfigurationManager">
  <configuration default="false" name="All Tests" type="JavaScriptTestRunnerJest" nameIsGenerated="true">
    <node-interpreter value="project" />
    <!-- Required so Jest can execute native ES modules -->
    <node-options value="--experimental-vm-modules" />
    <jest-package value="$PROJECT_DIR$/node_modules/jest" />
    <working-dir value="$PROJECT_DIR$" />
    <jest-options value="tests/test.js --detectOpenHandles" />
    <envs />
    <scope-kind value="ALL" />
    <method v="2">
      <!-- Runs "npm run build" before launching the tests -->
      <option name="NpmBeforeRunTask" enabled="true">
        <package-json value="$PROJECT_DIR$/package.json" />
        <command value="run" />
        <scripts>
          <script value="build" />
        </scripts>
        <node-interpreter value="project" />
        <envs />
      </option>
    </method>
  </configuration>
</component>
|
|
@@ -0,0 +1,119 @@
|
|
|
1
|
+
import fs from "fs";
|
|
2
|
+
import path from "path";
|
|
3
|
+
import util from "util";
|
|
4
|
+
import csv from "json-2-csv";
|
|
5
|
+
import csvParser from "csv-parser";
|
|
6
|
+
import { DEFAULT_BACKUP_CHUNK_SIZE, DEFAULT_BACKUP_LOCATION, DEFAULT_LOGGER_HIGHLIGHT, DEFAULT_LOGGER_RAW_VALUE, } from "./util.js";
|
|
7
|
+
/**
 * Dump every row of `table` into CSV chunk files inside the configured
 * backup directory (optionally nested under `dirname`).
 *
 * Rows are paged 1000 at a time; a new chunk file is started whenever the
 * current one would exceed the configured maximum chunk size.
 * Errors are logged, not rethrown (best-effort backup).
 *
 * @param table the Table instance to back up (must be attached to an ORM)
 * @param dirname optional sub-directory name acting as a backup ID
 */
export async function backupTable(table, dirname) {
    if (!table.orm)
        throw new Error("missing ORM");
    let offset = 0;
    let chunkIndex = 0;
    const chunkDir = path.join(table.orm.config.backups?.location ?? DEFAULT_BACKUP_LOCATION, dirname ?? "");
    if (!fs.existsSync(chunkDir)) {
        fs.mkdirSync(chunkDir, { recursive: true });
        console.log(`Backup directory ${util.styleText(table.orm.config.loggerStyles?.highlight ?? DEFAULT_LOGGER_HIGHLIGHT, path.relative(process.cwd(), chunkDir))} created.`);
    }
    try {
        // Total number of records, used for paging and progress reporting.
        const rowCount = await table.count();
        const limit = 1000; // rows fetched per query
        let writeStream = null;
        const closePromises = []; // flush completion of every chunk file
        while (offset < rowCount) {
            // Fetch one page of data.
            const rows = await table.query.select("*").limit(limit).offset(offset);
            if (rows.length === 0)
                break; // the table shrank while we were paging
            // CSV including the header line (used for the size check and for new files).
            const csvData = csv.json2csv(rows);
            // Open a new chunk file when none exists yet or the current one would
            // grow past the configured chunk size.
            let isNewFile = false;
            if (!writeStream ||
                writeStream.bytesWritten + Buffer.byteLength(csvData, "utf8") >
                    (table.orm.config.backups?.chunkSize ?? DEFAULT_BACKUP_CHUNK_SIZE)) {
                if (writeStream) {
                    const finished = writeStream;
                    closePromises.push(new Promise((resolve) => finished.end(resolve)));
                }
                const chunkFile = path.join(chunkDir, `${table.options.name}_chunk_${chunkIndex}.csv`);
                writeStream = fs.createWriteStream(chunkFile, { flags: "a" });
                chunkIndex++;
                isNewFile = true;
            }
            // Fixed: only the first page written to a file keeps its header line —
            // headers repeated mid-file would be restored as data rows later.
            const payload = isNewFile
                ? csvData
                : csv.json2csv(rows, { prependHeader: false });
            // Fixed: guarantee a row terminator between pages (json2csv does not
            // emit a trailing EOL), otherwise consecutive pages glue together.
            writeStream.write(payload.endsWith("\n") ? payload : payload + "\n");
            offset += limit;
            // Fixed: progress is derived from rows processed instead of files
            // created (chunkIndex/chunkCount stayed near 0% with few large files).
            const percent = Math.min(100, Math.round((offset / rowCount) * 100));
            process.stdout.write(`\rBacking up table ${util.styleText(table.orm.config.loggerStyles?.highlight ?? DEFAULT_LOGGER_HIGHLIGHT, table.options.name)}: ${util.styleText(table.orm.config.loggerStyles?.rawValue ?? DEFAULT_LOGGER_RAW_VALUE, String(percent))}%`);
        }
        if (writeStream) {
            const finished = writeStream;
            closePromises.push(new Promise((resolve) => finished.end(resolve)));
        }
        // Wait until every chunk file is fully flushed to disk.
        await Promise.all(closePromises);
        console.log(`\nBackup of table ${util.styleText(table.orm.config.loggerStyles?.highlight ?? DEFAULT_LOGGER_HIGHLIGHT, table.options.name)} completed.`);
    }
    catch (error) {
        console.error(`\nError while backing up table ${util.styleText(table.orm.config.loggerStyles?.highlight ?? DEFAULT_LOGGER_HIGHLIGHT, table.options.name)}:`, error);
    }
}
|
|
56
|
+
/**
 * Restore a table from the CSV chunk files produced by {@link backupTable}.
 *
 * @warning The table is truncated before the data is re-inserted.
 *
 * @param table the Table instance to restore (must be attached to an ORM)
 * @param dirname optional sub-directory name acting as a backup ID
 */
export async function restoreBackup(table, dirname) {
    if (!table.orm)
        throw new Error("missing ORM");
    const chunkDir = path.join(table.orm.config.backups?.location ?? DEFAULT_BACKUP_LOCATION, dirname ?? "");
    const chunkFiles = fs
        .readdirSync(chunkDir)
        .filter((file) => file.split("_chunk_")[0] === table.options.name);
    await table.query.truncate();
    try {
        const limit = 1000; // rows inserted per batch
        for (let chunkFile of chunkFiles) {
            const filePath = path.join(chunkDir, chunkFile);
            let rows = [];
            // Fixed: the "data" listener is kept synchronous — streams ignore the
            // promise returned by an async listener, so the previous code left
            // inserts floating (unawaited, with unhandled rejections). Batch
            // insert promises are collected and settled before resolving.
            const inserts = [];
            await new Promise((resolve, reject) => {
                fs.createReadStream(filePath)
                    .pipe(csvParser())
                    .on("data", (row) => {
                        rows.push(row);
                        // Fixed off-by-one: flush at exactly `limit` rows (was "> limit").
                        if (rows.length >= limit) {
                            inserts.push(table.query.insert(rows));
                            rows = [];
                        }
                    })
                    .on("end", () => {
                        // Insert the remaining rows once the file is fully read.
                        if (rows.length > 0)
                            inserts.push(table.query.insert(rows));
                        // Propagate insert failures to the surrounding catch block.
                        Promise.all(inserts)
                            .then(() => {
                                console.log(`Restored chunk ${util.styleText(table.orm.config.loggerStyles?.highlight ??
                                    DEFAULT_LOGGER_HIGHLIGHT, chunkFile)} into table ${util.styleText(table.orm.config.loggerStyles?.highlight ??
                                    DEFAULT_LOGGER_HIGHLIGHT, table.options.name)}.`);
                                resolve();
                            })
                            .catch(reject);
                    })
                    .on("error", reject);
            });
        }
    }
    catch (error) {
        console.error(`Error while restoring backup of table ${util.styleText(table.orm.config.loggerStyles?.highlight ?? DEFAULT_LOGGER_HIGHLIGHT, table.options.name)}:`, error);
    }
    console.log(`Backup of table ${util.styleText(table.orm.config.loggerStyles?.highlight ?? DEFAULT_LOGGER_HIGHLIGHT, table.options.name)} restored.`);
}
|
|
98
|
+
/**
 * Best-effort: disable foreign key enforcement on whichever SQL dialect the
 * connection speaks. One statement per supported client is attempted; only a
 * total failure (no dialect accepted its statement) is fatal.
 *
 * @param orm the ORM instance whose `raw` method executes SQL
 * @throws {Error} when every dialect-specific statement was rejected
 */
export async function disableForeignKeys(orm) {
    const result = await Promise.allSettled([
        orm.raw("SET session_replication_role = replica;"), // for pg
        orm.raw("PRAGMA foreign_keys = OFF;"), // for sqlite3
        orm.raw("SET FOREIGN_KEY_CHECKS = 0;"), // for mysql2
    ]);
    const errors = result.filter((r) => r.status === "rejected");
    // Fixed: compare against the actual number of attempts instead of the
    // hard-coded 3, so adding a dialect cannot silently break the check.
    if (errors.length === result.length) {
        throw new Error("Failed to disable foreign key constraints.");
    }
}
|
|
109
|
+
/**
 * Best-effort: re-enable foreign key enforcement on whichever SQL dialect
 * the connection speaks. Mirror image of `disableForeignKeys`.
 *
 * @param orm the ORM instance whose `raw` method executes SQL
 * @throws {Error} when every dialect-specific statement was rejected
 */
export async function enableForeignKeys(orm) {
    const result = await Promise.allSettled([
        orm.raw("SET session_replication_role = DEFAULT;"), // for pg
        orm.raw("PRAGMA foreign_keys = ON;"), // for sqlite3
        orm.raw("SET FOREIGN_KEY_CHECKS = 1;"), // for mysql2
    ]);
    const errors = result.filter((r) => r.status === "rejected");
    // Fixed: compare against the actual number of attempts instead of the
    // hard-coded 3, so adding a dialect cannot silently break the check.
    if (errors.length === result.length) {
        throw new Error("Failed to enable foreign key constraints.");
    }
}
|
|
@@ -0,0 +1,36 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Advanced cache for async queries
|
|
3
|
+
*/
|
|
4
|
+
/**
 * Advanced cache for async queries.
 *
 * Memoizes the promise returned by `_request` under an id until the entry
 * expires (`_timeout` milliseconds, possibly Infinity) or is invalidated.
 */
export class ResponseCache {
    _request; // async producer: (...params) => value
    _timeout; // entry lifetime in milliseconds
    _cache = new Map(); // id -> { value, expires }
    /**
     * @param _request function whose results are cached
     * @param _timeout lifetime of an entry in milliseconds (Infinity allowed)
     */
    constructor(_request, _timeout) {
        this._request = _request;
        this._timeout = _timeout;
    }
    /**
     * Return the memoized value for `id`, refreshing it when the entry is
     * missing or expired.
     */
    get(id, ...params) {
        const cached = this._cache.get(id);
        if (!cached || cached.expires < Date.now())
            return this.fetch(id, ...params);
        return cached.value;
    }
    /**
     * Force a refresh of `id` (bypasses and replaces any cached entry).
     */
    fetch(id, ...params) {
        const entry = {
            value: this._request(...params),
            expires: Date.now() + this._timeout,
        };
        this._cache.set(id, entry);
        // Fixed: evict rejected requests so a transient failure is not served
        // from the cache until expiry (which may be never, with Infinity).
        Promise.resolve(entry.value).catch(() => {
            if (this._cache.get(id) === entry)
                this._cache.delete(id);
        });
        return entry.value;
    }
    /**
     * Drop one entry, or the whole cache when `id` is omitted.
     * NOTE(review): any falsy id ("" or 0) clears the whole cache — callers
     * are expected to use non-empty string ids; confirm before relying on it.
     */
    invalidate(id) {
        if (!id) {
            this._cache.clear();
            return;
        }
        this._cache.delete(id);
    }
}
|
package/dist/app/orm.js
ADDED
|
@@ -0,0 +1,109 @@
|
|
|
1
|
+
import url from "url";
import { Handler } from "@ghom/handler";
import { default as knex } from "knex";
import { isCJS } from "./util.js";
import { Table } from "./table.js";
import { backupTable, restoreBackup, disableForeignKeys, enableForeignKeys, } from "./backup.js";
// Fixed: relative ESM imports require the ".js" extension (every sibling
// import has it); "./caching" throws ERR_MODULE_NOT_FOUND at runtime.
import { ResponseCache } from "./caching.js";
/**
 * Entry point of the package: wraps a knex connection, loads Table files
 * through @ghom/handler and exposes caching plus CSV backup/restore helpers.
 */
export class ORM {
    config;
    _ready = false; // true once init() has created every table
    database; // the knex connection
    handler; // @ghom/handler instance loading the table files
    _rawCache; // memoizes cache.raw(sql) results
    constructor(config) {
        this.config = config;
        // Fall back to an in-memory sqlite3 database when no knex config is given.
        this.database = knex(config.database ?? {
            client: "sqlite3",
            useNullAsDefault: true,
            connection: {
                filename: ":memory:",
            },
        });
        this.handler = new Handler(config.tableLocation, {
            // CJS can import() a raw path; native ESM needs a file:// URL
            // (notably on Windows, where absolute paths are not valid URLs).
            loader: (filepath) => import(isCJS ? filepath : url.pathToFileURL(filepath).href).then((file) => file.default),
            pattern: /\.js$/,
        });
        this._rawCache = new ResponseCache((raw) => this.raw(raw), config.caching ?? Infinity);
    }
    /** Every Table instance currently loaded by the handler. */
    get cachedTables() {
        return [...this.handler.elements.values()];
    }
    /** Names of every loaded table. */
    get cachedTableNames() {
        return this.cachedTables.map((table) => table.options.name);
    }
    /** Whether a table with that name is loaded (does not hit the database). */
    hasCachedTable(name) {
        return this.cachedTables.some((table) => table.options.name === name);
    }
    /** Whether the table exists in the database itself. */
    async hasTable(name) {
        return this.database.schema.hasTable(name);
    }
    /**
     * Handle the table files and create the tables in the database.
     */
    async init() {
        await this.handler.init();
        await enableForeignKeys(this);
        // Internal table tracking the migration version of every other table.
        this.handler.elements.set("migration", new Table({
            name: "migration",
            priority: Infinity,
            setup: (table) => {
                table.string("table").unique().notNullable();
                table.integer("version").notNullable();
            },
        }));
        // Create tables in descending priority order so that tables others
        // depend on are built first.
        for (const table of this.cachedTables.sort((a, b) => (b.options.priority ?? 0) - (a.options.priority ?? 0))) {
            table.orm = this;
            await table.make();
        }
        this._ready = true;
    }
    /**
     * Run a raw SQL query. Once the ORM is ready, every cache is invalidated
     * first, because the statement may write to the database.
     */
    raw(sql) {
        if (this._ready)
            this.cache.invalidate();
        return this.database.raw(sql);
    }
    cache = {
        // Cached raw query; pass anyDataUpdated=true when the statement writes.
        raw: (sql, anyDataUpdated) => {
            if (anyDataUpdated)
                this.cache.invalidate();
            return this._rawCache.get(sql, sql);
        },
        // Drop the raw cache and every table-level cache.
        invalidate: () => {
            this._rawCache.invalidate();
            this.cachedTables.forEach((table) => table.cache.invalidate());
        },
    };
    /**
     * Create a backup of the database. <br>
     * The backup will be saved in the location specified in the config.
     */
    async createBackup(dirname) {
        try {
            for (let table of this.cachedTables) {
                await backupTable(table, dirname);
            }
            console.log("Database backup created.");
        }
        catch (error) {
            console.error("Error while creating backup of the database.", error);
        }
    }
    /**
     * Restore the database from the backup. <br>
     * @warning This will delete all the data in the tables.
     */
    async restoreBackup(dirname) {
        try {
            await disableForeignKeys(this);
            for (let table of this.cachedTables) {
                await restoreBackup(table, dirname);
            }
            await enableForeignKeys(this);
            console.log("Database restored from backup.");
        }
        catch (error) {
            console.error("Error while restoring backup of the database.", error);
        }
    }
}
|
|
@@ -1,7 +1,11 @@
|
|
|
1
|
-
import
|
|
1
|
+
import util from "util";
|
|
2
|
+
import { ResponseCache } from "./caching.js";
|
|
3
|
+
import { DEFAULT_LOGGER_DESCRIPTION, DEFAULT_LOGGER_HIGHLIGHT, DEFAULT_LOGGER_RAW_VALUE, } from "./util.js";
|
|
2
4
|
export class Table {
|
|
3
5
|
options;
|
|
4
6
|
orm;
|
|
7
|
+
_whereCache;
|
|
8
|
+
_countCache;
|
|
5
9
|
constructor(options) {
|
|
6
10
|
this.options = options;
|
|
7
11
|
}
|
|
@@ -13,6 +17,30 @@ export class Table {
|
|
|
13
17
|
get query() {
|
|
14
18
|
return this.db(this.options.name);
|
|
15
19
|
}
|
|
20
|
+
// Cached access to this table's queries. Both underlying ResponseCaches are
// created in make(); using this getter before the ORM initialized this table
// is a programming error.
get cache() {
    if (!this._whereCache || !this._countCache)
        throw new Error("missing cache");
    if (!this.orm)
        throw new Error("missing ORM");
    return {
        // Cached read: memoizes the result of cb(query) under `id`.
        get: (id, cb) => {
            return this._whereCache.get(id, cb);
        },
        // Write through the cache: invalidates every ORM cache first, then
        // runs the callback against a fresh query builder.
        set: (cb) => {
            // todo: invalidate only the related tables
            this.orm.cache.invalidate();
            return cb(this.query);
        },
        // Cached row count; "*" is the cache key for the unfiltered count.
        count: (where) => {
            return this._countCache.get(where ?? "*", where ?? null);
        },
        // Drop this table's memoized results and the ORM-level raw cache.
        invalidate: () => {
            this._whereCache.invalidate();
            this._countCache.invalidate();
            this.orm._rawCache.invalidate();
        },
    };
}
|
|
16
44
|
async count(where) {
|
|
17
45
|
return this.query
|
|
18
46
|
.select(this.db.raw("count(*) as total"))
|
|
@@ -32,32 +60,36 @@ export class Table {
|
|
|
32
60
|
return this.getColumns().then(Object.keys);
|
|
33
61
|
}
|
|
34
62
|
async isEmpty() {
|
|
35
|
-
return this.count().then((count) => count === 0);
|
|
63
|
+
return this.count().then((count) => +count === 0);
|
|
36
64
|
}
|
|
37
65
|
async make() {
|
|
38
66
|
if (!this.orm)
|
|
39
67
|
throw new Error("missing ORM");
|
|
68
|
+
this._whereCache = new ResponseCache((cb) => cb(this.query), this.options.caching ?? this.orm?.config.caching ?? Infinity);
|
|
69
|
+
this._countCache = new ResponseCache((where) => this.count(where ?? undefined), this.options.caching ?? this.orm?.config.caching ?? Infinity);
|
|
40
70
|
try {
|
|
41
71
|
await this.db.schema.createTable(this.options.name, this.options.setup);
|
|
42
|
-
this.orm.config.logger?.log(`created table ${
|
|
43
|
-
? ` ${
|
|
72
|
+
this.orm.config.logger?.log(`created table ${util.styleText(this.orm.config.loggerStyles?.highlight ?? DEFAULT_LOGGER_HIGHLIGHT, this.options.name)}${this.options.description
|
|
73
|
+
? ` ${util.styleText(this.orm.config.loggerStyles?.description ??
|
|
74
|
+
DEFAULT_LOGGER_DESCRIPTION, this.options.description)}`
|
|
44
75
|
: ""}`);
|
|
45
76
|
}
|
|
46
77
|
catch (error) {
|
|
47
78
|
if (error.toString().includes("syntax error")) {
|
|
48
|
-
this.orm.config.logger?.error(`you need to implement the "setup" method in options of your ${
|
|
79
|
+
this.orm.config.logger?.error(`you need to implement the "setup" method in options of your ${util.styleText(this.orm.config.loggerStyles?.highlight ?? DEFAULT_LOGGER_HIGHLIGHT, this.options.name)} table!`);
|
|
49
80
|
throw error;
|
|
50
81
|
}
|
|
51
82
|
else {
|
|
52
|
-
this.orm.config.logger?.log(`loaded table ${
|
|
53
|
-
? ` ${
|
|
83
|
+
this.orm.config.logger?.log(`loaded table ${util.styleText(this.orm.config.loggerStyles?.highlight ?? DEFAULT_LOGGER_HIGHLIGHT, this.options.name)}${this.options.description
|
|
84
|
+
? ` ${util.styleText(this.orm.config.loggerStyles?.description ??
|
|
85
|
+
DEFAULT_LOGGER_DESCRIPTION, this.options.description)}`
|
|
54
86
|
: ""}`);
|
|
55
87
|
}
|
|
56
88
|
}
|
|
57
89
|
try {
|
|
58
90
|
const migrated = await this.migrate();
|
|
59
91
|
if (migrated !== false) {
|
|
60
|
-
this.orm.config.logger?.log(`migrated table ${
|
|
92
|
+
this.orm.config.logger?.log(`migrated table ${util.styleText(this.orm.config.loggerStyles?.highlight ?? DEFAULT_LOGGER_HIGHLIGHT, this.options.name)} to version ${util.styleText(this.orm.config.loggerStyles?.rawValue ?? DEFAULT_LOGGER_RAW_VALUE, String(migrated))}`);
|
|
61
93
|
}
|
|
62
94
|
}
|
|
63
95
|
catch (error) {
|
package/dist/app/util.js
ADDED
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
import path from "path";
import fs from "fs";

// Where backups land when the ORM config does not specify a location.
export const DEFAULT_BACKUP_LOCATION = path.join(process.cwd(), "backup");
// Maximum size of a single CSV backup chunk file (5MB).
export const DEFAULT_BACKUP_CHUNK_SIZE = 5 * 1024 * 1024;
// Fallback util.styleText colors used by the loggers.
export const DEFAULT_LOGGER_HIGHLIGHT = "blueBright";
export const DEFAULT_LOGGER_DESCRIPTION = "grey";
export const DEFAULT_LOGGER_RAW_VALUE = "magentaBright";

// Detect the host project's module system: a package.json without a "type"
// field (or with "type": "commonjs") means CommonJS.
let isCJS = false;
try {
    const packagePath = path.join(process.cwd(), "package.json");
    const pack = JSON.parse(fs.readFileSync(packagePath, "utf8"));
    isCJS = pack.type === "commonjs" || pack.type == null;
}
catch {
    throw new Error("Missing package.json: Can't detect the type of modules.\n" +
        "The ORM needs a package.json file present in the process's current working directory.\n" +
        "Please create a package.json file or run the project from another entry point.");
}
export { isCJS };
|
package/dist/index.js
ADDED
package/package.json
CHANGED
|
@@ -1,11 +1,12 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@ghom/orm",
|
|
3
|
-
"version": "1.
|
|
3
|
+
"version": "1.8.0",
|
|
4
4
|
"license": "MIT",
|
|
5
5
|
"type": "module",
|
|
6
|
-
"main": "dist/
|
|
7
|
-
"types": "dist/
|
|
6
|
+
"main": "dist/index.js",
|
|
7
|
+
"types": "dist/index.d.ts",
|
|
8
8
|
"description": "TypeScript KnexJS ORM & handler",
|
|
9
|
+
"homepage": "https://github.com/GhomKrosmonaute/orm",
|
|
9
10
|
"prettier": {
|
|
10
11
|
"semi": false
|
|
11
12
|
},
|
|
@@ -14,17 +15,18 @@
|
|
|
14
15
|
"require": "./dist/cjs/index.js"
|
|
15
16
|
},
|
|
16
17
|
"scripts": {
|
|
17
|
-
"format": "prettier --write src tsconfig
|
|
18
|
-
"build": "
|
|
18
|
+
"format": "prettier --write src tsconfig.json tests",
|
|
19
|
+
"build": "rimraf dist && tsc",
|
|
19
20
|
"test": "npm run build && node --experimental-vm-modules node_modules/jest/bin/jest.js tests/test.js --detectOpenHandles",
|
|
20
21
|
"prepublishOnly": "npm run format && npm test"
|
|
21
22
|
},
|
|
22
23
|
"devDependencies": {
|
|
23
24
|
"@types/jest": "^29.5.6",
|
|
24
|
-
"@types/node": "^
|
|
25
|
+
"@types/node": "^22.0.0",
|
|
25
26
|
"dotenv": "^16.3.1",
|
|
26
27
|
"jest": "^29.7.0",
|
|
27
28
|
"prettier": "^3.0.3",
|
|
29
|
+
"rimraf": "^6.0.1",
|
|
28
30
|
"typescript": "^5.2.2"
|
|
29
31
|
},
|
|
30
32
|
"optionalDependencies": {
|
|
@@ -34,6 +36,15 @@
|
|
|
34
36
|
},
|
|
35
37
|
"dependencies": {
|
|
36
38
|
"@ghom/handler": "^2.0.0",
|
|
39
|
+
"csv-parser": "^3.0.0",
|
|
40
|
+
"json-2-csv": "^5.5.6",
|
|
37
41
|
"knex": "^3.0.1"
|
|
42
|
+
},
|
|
43
|
+
"engines": {
|
|
44
|
+
"node": ">=22.0.0"
|
|
45
|
+
},
|
|
46
|
+
"repository": {
|
|
47
|
+
"url": "https://github.com/GhomKrosmonaute/orm.git",
|
|
48
|
+
"type": "git"
|
|
38
49
|
}
|
|
39
50
|
}
|
package/readme.md
CHANGED
|
@@ -1,5 +1,7 @@
|
|
|
1
1
|
# TypeScript KnexJS ORM & handler
|
|
2
2
|
|
|
3
|
+

|
|
4
|
+
|
|
3
5
|
## Install
|
|
4
6
|
|
|
5
7
|
```bash
|
|
@@ -13,18 +15,29 @@ import { ORM } from "@ghom/orm"
|
|
|
13
15
|
|
|
14
16
|
const orm = new ORM({
|
|
15
17
|
// tables directory
|
|
16
|
-
|
|
18
|
+
tableLocation: "./tables",
|
|
17
19
|
|
|
18
20
|
// knex config (sqlite3 by default)
|
|
19
21
|
database: { ... },
|
|
20
22
|
|
|
21
23
|
// custom logger (console by default)
|
|
22
24
|
logger: console,
|
|
23
|
-
loggerColors: { ... }
|
|
25
|
+
loggerColors: { ... },
|
|
26
|
+
|
|
27
|
+
// caching options for all tables and rawCache queries (default to Infinity)
|
|
28
|
+
caching: 10 * 60 * 1000,
|
|
29
|
+
|
|
30
|
+
// configuration for the database backups
|
|
31
|
+
backups: {
|
|
32
|
+
location: "./backups",
|
|
33
|
+
}
|
|
24
34
|
})
|
|
35
|
+
|
|
36
|
+
// start handling of tables
|
|
37
|
+
await orm.init()
|
|
25
38
|
```
|
|
26
39
|
|
|
27
|
-
##
|
|
40
|
+
## Add tables
|
|
28
41
|
|
|
29
42
|
The tables are automatically loaded from the `location` directory.
|
|
30
43
|
|
|
@@ -33,7 +46,12 @@ The tables are automatically loaded from the `location` directory.
|
|
|
33
46
|
|
|
34
47
|
import { Table } from "@ghom/orm"
|
|
35
48
|
|
|
36
|
-
|
|
49
|
+
interface User {
|
|
50
|
+
username: string
|
|
51
|
+
password: string
|
|
52
|
+
}
|
|
53
|
+
|
|
54
|
+
export default new Table<User>({
|
|
37
55
|
name: "user",
|
|
38
56
|
|
|
39
57
|
// the higher the priority, the earlier the table is compiled
|
|
@@ -55,24 +73,111 @@ export default new Table({
|
|
|
55
73
|
// the "then" hook is executed after the table is created and the migrations are run
|
|
56
74
|
then: ({ query }) => {
|
|
57
75
|
query.insert({ username: "admin", password: "admin" })
|
|
58
|
-
}
|
|
76
|
+
},
|
|
77
|
+
|
|
78
|
+
caching: 10 * 60 * 1000 // The table cache. Default to the ORM cache or Infinity
|
|
59
79
|
})
|
|
60
80
|
```
|
|
61
81
|
|
|
62
|
-
##
|
|
82
|
+
## Launch a query
|
|
83
|
+
|
|
84
|
+
For more information about the query builder, see [knexjs.org](https://knexjs.org/).
|
|
85
|
+
You can launch a SQL query on a table like that
|
|
86
|
+
|
|
87
|
+
```typescript
|
|
88
|
+
import user from "./tables/user"

export async function compareHash(username, hash): Promise<boolean> {
  const row = await user.query
    .select()
    .where("username", username)
    .first()

  return !!row && row.password === hash
}
|
|
98
|
+
```
|
|
99
|
+
|
|
100
|
+
## Backup
|
|
101
|
+
|
|
102
|
+
You can backup the database by calling the `createBackup` and `restoreBackup` methods on the ORM instance. The backup is stored in the `config.backups.location` directory.
|
|
103
|
+
|
|
104
|
+
```typescript
|
|
105
|
+
await orm.createBackup() // on the root backup directory (not recommended)
|
|
106
|
+
await orm.createBackup("2021-01-01T00:00:00.000Z") // with dirname as backup ID
|
|
63
107
|
|
|
64
|
-
|
|
108
|
+
await orm.restoreBackup()
|
|
109
|
+
await orm.restoreBackup("2021-01-01T00:00:00.000Z")
|
|
110
|
+
```
|
|
65
111
|
|
|
66
|
-
##
|
|
112
|
+
## Caching
|
|
67
113
|
|
|
68
|
-
|
|
114
|
+
The cache is automatically managed by the ORM. When a table is requested from the `<Table>.cache` property, the ORM will automatically use caching for all your queries. The cache is useful when you have a lot of requests on the same table, and you don't want to query the database every time.
|
|
69
115
|
|
|
70
116
|
```typescript
|
|
71
|
-
|
|
117
|
+
// get the number of rows in the table with caching
|
|
118
|
+
await table.cache.count() // => 10
|
|
119
|
+
|
|
120
|
+
// add a row with caching
|
|
121
|
+
await table.cache.set((query) => {
|
|
122
|
+
return query.insert({ name: "test" })
|
|
123
|
+
})
|
|
124
|
+
|
|
125
|
+
await table.cache.count() // => 11
|
|
126
|
+
|
|
127
|
+
// Get the row with caching.
|
|
128
|
+
// After the first call, the row is cached until
|
|
129
|
+
// the cache is invalidated by a "cache.set" or "cache.invalidate" call
|
|
130
|
+
await table.cache.get("named test", (query) => {
|
|
131
|
+
return query.where("name", "test").first()
|
|
132
|
+
}) // => { name: "test" }
|
|
133
|
+
|
|
134
|
+
// delete the row without caching
|
|
135
|
+
await table.query.delete().where("name", "test")
|
|
136
|
+
|
|
137
|
+
await table.cache.count() // => 11 (unchanged)
|
|
138
|
+
|
|
139
|
+
// indicate that the cache is invalidated
|
|
140
|
+
// and force the cache to be updated
|
|
141
|
+
table.cache.invalidate()
|
|
142
|
+
|
|
143
|
+
await table.cache.count() // => 10
|
|
144
|
+
await table.cache.count() // => 10 (no more query to the database)
|
|
145
|
+
|
|
146
|
+
// remove all rows from a table with caching
|
|
147
|
+
await table.cache.set((query) => {
|
|
148
|
+
return query.truncate()
|
|
149
|
+
})
|
|
150
|
+
|
|
151
|
+
await table.cache.count() // => 0
|
|
72
152
|
```
|
|
73
153
|
|
|
154
|
+
> ⚠️ For a cache-style usage, you should use the `cache` property for all your queries. If you use the `query` property, the cache will not be updated, and it will cause a cache inconsistency.
|
|
155
|
+
|
|
156
|
+
### Raw cache
|
|
157
|
+
|
|
158
|
+
You can also cache raw queries with the `<ORM>.cache.raw` property. The raw cache is useful when you have a complex query that you want to cache.
|
|
159
|
+
|
|
74
160
|
```typescript
|
|
75
|
-
await
|
|
161
|
+
const fooUser = await orm.cache.raw("select * from user where name = 'foo'") // query the database
|
|
162
|
+
const barUser = await orm.cache.raw("select * from user where name = 'bar'") // query the database
|
|
163
|
+
const fooUserCached = await orm.cache.raw("select * from user where name = 'foo'") // no query to the database
|
|
76
164
|
```
|
|
77
165
|
|
|
78
|
-
The
|
|
166
|
+
The cache of the `<ORM>.cache.raw` method is automatically invalidated when the database is updated.
|
|
167
|
+
|
|
168
|
+
## Future features
|
|
169
|
+
|
|
170
|
+
- [x] Add timed caching system
|
|
171
|
+
- [x] Add backup option
|
|
172
|
+
- [ ] Dependency management between tables
|
|
173
|
+
- [ ] Auto typings for tables from the column definitions
|
|
174
|
+
- [ ] Add specific methods for relations and joins
|
|
175
|
+
- [ ] Add admin panel
|
|
176
|
+
- [ ] Make possible to switch the data between all possible clients (pg, mysql, sqlite3)
|
|
177
|
+
- [ ] Add a way to set up timezone directly in the ORM constructor
|
|
178
|
+
|
|
179
|
+
## Used by
|
|
180
|
+
|
|
181
|
+
[<img src="https://github.com/GhomKrosmonaute.png?size=50" width=50>](https://github.com/GhomKrosmonaute)
|
|
182
|
+
[<img src="https://github.com/Les-Laboratoires.png?size=50" width=50>](https://github.com/Les-Laboratoires)
|
|
183
|
+
[<img src="https://github.com/bot-ts.png?size=50" width=50>](https://github.com/bot-ts)
|