@ghom/orm 1.7.2 → 1.8.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,211 @@
+ import fs from "fs"
+ import path from "path"
+ import util from "util"
+ import csv from "json-2-csv"
+ import csvParser from "csv-parser"
+ import { ORM } from "./orm.js"
+ import { Table } from "./table.js"
+ import {
+   DEFAULT_BACKUP_CHUNK_SIZE,
+   DEFAULT_BACKUP_LOCATION,
+   DEFAULT_LOGGER_HIGHLIGHT,
+   DEFAULT_LOGGER_RAW_VALUE,
+ } from "./util.js"
+
+ export async function backupTable(table: Table, dirname?: string) {
+   if (!table.orm) throw new Error("missing ORM")
+
+   let offset = 0
+   let chunkIndex = 0
+
+   const chunkDir = path.join(
+     table.orm.config.backups?.location ?? DEFAULT_BACKUP_LOCATION,
+     dirname ?? "",
+   )
+
+   if (!fs.existsSync(chunkDir)) {
+     fs.mkdirSync(chunkDir, { recursive: true })
+
+     console.log(
+       `Backup directory ${util.styleText(
+         table.orm.config.loggerStyles?.highlight ?? DEFAULT_LOGGER_HIGHLIGHT,
+         path.relative(process.cwd(), chunkDir),
+       )} created.`,
+     )
+   }
+
+   try {
+     // Count the total number of records in the table
+     const rowCount = await table.count()
+     const limit = 1000 // Limit per query
+     const chunkCount = Math.ceil(rowCount / limit)
+
+     let writeStream: fs.WriteStream | null = null
+     const closePromises = [] // Array to store the close promises
+
+     while (offset < rowCount) {
+       // Fetch a chunk of data
+       const rows = await table.query.select("*").limit(limit).offset(offset)
+
+       // Convert the data to CSV
+       const csvData = csv.json2csv(rows)
+
+       // If no file has been created yet, or the max chunk size has been exceeded, create a new CSV file
+       if (
+         !writeStream ||
+         writeStream.bytesWritten + Buffer.byteLength(csvData, "utf8") >
+           (table.orm.config.backups?.chunkSize ?? DEFAULT_BACKUP_CHUNK_SIZE)
+       ) {
+         if (writeStream) {
+           closePromises.push(
+             new Promise((resolve) => writeStream!.end(resolve)),
+           ) // Add the close promise
+         }
+
+         const chunkFile = path.join(
+           chunkDir,
+           `${table.options.name}_chunk_${chunkIndex}.csv`,
+         )
+         writeStream = fs.createWriteStream(chunkFile, { flags: "a" })
+         chunkIndex++
+       }
+
+       // Write the data to the stream
+       writeStream.write(csvData)
+       offset += limit
+
+       process.stdout.write(
+         `\rBacking up table ${util.styleText(
+           table.orm.config.loggerStyles?.highlight ?? DEFAULT_LOGGER_HIGHLIGHT,
+           table.options.name,
+         )}: ${util.styleText(
+           table.orm.config.loggerStyles?.rawValue ?? DEFAULT_LOGGER_RAW_VALUE,
+           String(Math.round((chunkIndex / chunkCount) * 100)),
+         )}%`,
+       )
+     }
+
+     if (writeStream) {
+       closePromises.push(new Promise((resolve) => writeStream!.end(resolve))) // Add the close promise for the last stream
+     }
+
+     // Wait for all write streams to be closed
+     await Promise.all(closePromises)
+
+     console.log(
+       `\nBackup of table ${util.styleText(
+         table.orm.config.loggerStyles?.highlight ?? DEFAULT_LOGGER_HIGHLIGHT,
+         table.options.name,
+       )} completed.`,
+     )
+   } catch (error) {
+     console.error(
+       `\nError while backing up table ${util.styleText(
+         table.orm.config.loggerStyles?.highlight ?? DEFAULT_LOGGER_HIGHLIGHT,
+         table.options.name,
+       )}:`,
+       error,
+     )
+   }
+ }
+
+ export async function restoreBackup(table: Table, dirname?: string) {
+   if (!table.orm) throw new Error("missing ORM")
+
+   const chunkDir = path.join(
+     table.orm.config.backups?.location ?? DEFAULT_BACKUP_LOCATION,
+     dirname ?? "",
+   )
+
+   const chunkFiles = fs
+     .readdirSync(chunkDir)
+     .filter((file) => file.split("_chunk_")[0] === table.options.name)
+
+   await table.query.truncate()
+
+   try {
+     const limit = 1000 // Limit per query
+
+     for (let chunkFile of chunkFiles) {
+       const filePath = path.join(chunkDir, chunkFile)
+
+       let rows: any[] = []
+
+       await new Promise<void>((resolve, reject) => {
+         fs.createReadStream(filePath)
+           .pipe(csvParser())
+           .on("data", async (row) => {
+             rows.push(row)
+
+             if (rows.length > limit) {
+               const rowsCopy = rows.slice()
+               rows = []
+               await table.query.insert(rowsCopy)
+             }
+           })
+           .on("end", async () => {
+             // Insert the data into the table once the file has been fully read
+             if (rows.length > 0) await table.query.insert(rows)
+
+             console.log(
+               `Restored chunk ${util.styleText(
+                 table.orm!.config.loggerStyles?.highlight ??
+                   DEFAULT_LOGGER_HIGHLIGHT,
+                 chunkFile,
+               )} into table ${util.styleText(
+                 table.orm!.config.loggerStyles?.highlight ??
+                   DEFAULT_LOGGER_HIGHLIGHT,
+                 table.options.name,
+               )}.`,
+             )
+
+             resolve()
+           })
+           .on("error", reject)
+       })
+     }
+   } catch (error) {
+     console.error(
+       `Error while restoring backup of table ${util.styleText(
+         table.orm.config.loggerStyles?.highlight ?? DEFAULT_LOGGER_HIGHLIGHT,
+         table.options.name,
+       )}:`,
+       error,
+     )
+   }
+
+   console.log(
+     `Backup of table ${util.styleText(
+       table.orm.config.loggerStyles?.highlight ?? DEFAULT_LOGGER_HIGHLIGHT,
+       table.options.name,
+     )} restored.`,
+   )
+ }
+
+ export async function disableForeignKeys(orm: ORM) {
+   const result = await Promise.allSettled([
+     orm.raw("SET session_replication_role = replica;"), // for pg
+     orm.raw("PRAGMA foreign_keys = OFF;"), // for sqlite3
+     orm.raw("SET FOREIGN_KEY_CHECKS = 0;"), // for mysql2
+   ])
+
+   const errors = result.filter((r) => r.status === "rejected")
+
+   if (errors.length === 3) {
+     throw new Error("Failed to disable foreign key constraints.")
+   }
+ }
+
+ export async function enableForeignKeys(orm: ORM) {
+   const result = await Promise.allSettled([
+     orm.raw("SET session_replication_role = DEFAULT;"), // for pg
+     orm.raw("PRAGMA foreign_keys = ON;"), // for sqlite3
+     orm.raw("SET FOREIGN_KEY_CHECKS = 1;"), // for mysql2
+   ])
+
+   const errors = result.filter((r) => r.status === "rejected")
+
+   if (errors.length === 3) {
+     throw new Error("Failed to enable foreign key constraints.")
+   }
+ }
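
Note (not part of the published diff): backupTable streams each table into CSV files named <table>_chunk_<n>.csv under the configured backup location, and restoreBackup truncates the table before re-inserting rows from those chunk files. A minimal sketch of driving the helpers directly, where orm and usersTable are assumed to be an initialized ORM and a Table already bound to it:

  await backupTable(usersTable)   // writes <name>_chunk_0.csv, <name>_chunk_1.csv, ... into the backup directory
  await disableForeignKeys(orm)   // tries the pg, sqlite3 and mysql2 statements; throws only if all three fail
  await restoreBackup(usersTable) // truncates the table, then re-inserts rows chunk by chunk
  await enableForeignKeys(orm)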
@@ -0,0 +1,51 @@
+ export interface ResponseCacheData<Value> {
+   value: Value
+   expires: number
+   outdated?: boolean
+ }
+
+ /**
+  * Advanced cache for async queries
+  */
+ export class ResponseCache<Params extends any[], Value> {
+   private _cache = new Map<string, ResponseCacheData<Value>>()
+
+   constructor(
+     private _request: (...params: Params) => Value,
+     private _timeout: number,
+   ) {}
+
+   get(id: string, ...params: Params): Value {
+     const cached = this._cache.get(id)
+
+     if (!cached || cached.expires < Date.now()) {
+       this._cache.set(id, {
+         value: this._request(...params),
+         expires: Date.now() + this._timeout,
+       })
+     }
+
+     return this._cache.get(id)!.value
+   }
+
+   fetch(id: string, ...params: Params): Value {
+     this._cache.set(id, {
+       value: this._request(...params),
+       expires: Date.now() + this._timeout,
+     })
+
+     return this._cache.get(id)!.value
+   }
+
+   invalidate(): void
+   invalidate(id: string): void
+   invalidate(id?: string): void {
+     if (!id) {
+       this._cache.clear()
+
+       return
+     }
+
+     this._cache.delete(id)
+   }
+ }
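
Note (not part of the published diff): ResponseCache keys cached values by an id string and re-runs the request function only once the timeout has elapsed. A short usage sketch, where fetchUser and the 60-second timeout are illustrative assumptions:

  // Hypothetical request function; the first type parameter is its argument tuple, the second its return type.
  const userCache = new ResponseCache((id: string) => fetchUser(id), 60_000)

  const user = userCache.get("user:42", "42") // runs fetchUser("42"), then serves the cached value until it expires
  userCache.fetch("user:42", "42")            // force a refresh for one entry
  userCache.invalidate("user:42")             // drop one entry
  userCache.invalidate()                      // or clear the whole cache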
package/src/app/orm.ts CHANGED
@@ -1,17 +1,15 @@
- import fs from "fs"
  import url from "url"
- import path from "path"
  import { Handler } from "@ghom/handler"
  import { Knex, default as knex } from "knex"
+ import { isCJS, TextStyle } from "./util.js"
  import { MigrationData, Table } from "./table.js"
- import chalk, { Color } from "chalk"
-
- const defaultBackupDir = path.join(process.cwd(), "backup")
-
- const pack = JSON.parse(
-   fs.readFileSync(path.join(process.cwd(), "package.json"), "utf8"),
- )
- const isCJS = pack.type === "commonjs" || pack.type == void 0
+ import {
+   backupTable,
+   restoreBackup,
+   disableForeignKeys,
+   enableForeignKeys,
+ } from "./backup.js"
+ import { ResponseCache } from "./caching"

  export interface ILogger {
    log: (message: string) => void
@@ -23,7 +21,7 @@ export interface ORMConfig {
    /**
     * path to the directory that contains js files of tables
     */
-   location: string
+   tableLocation: string

    /**
     * database configuration
@@ -37,19 +35,37 @@ export interface ORMConfig {

    /**
     * Pattern used on logs when the table files are loaded or created. <br>
-    * Based on Chalk color-method names.
+    * Based on node:util.styleText style names.
     */
-   loggerColors?: {
-     highlight: typeof Color
-     rawValue: typeof Color
-     description: typeof Color
+   loggerStyles?: {
+     highlight: TextStyle
+     rawValue: TextStyle
+     description: TextStyle
    }
+
+   /**
+    * Configuration for the database backups.
+    */
+   backups?: {
+     location?: string
+     chunkSize?: number
+   }
+
+   /**
+    * The cache time in milliseconds. <br>
+    * Default is `Infinity`.
+    */
+   caching?: number
  }

  export class ORM {
+   private _ready = false
+
    public database: Knex
    public handler: Handler<Table<any>>

+   public _rawCache: ResponseCache<[sql: string], Knex.Raw>
+
    constructor(public config: ORMConfig) {
      this.database = knex(
        config.database ?? {
@@ -61,13 +77,18 @@ export class ORM {
        },
      )

-     this.handler = new Handler(config.location, {
+     this.handler = new Handler(config.tableLocation, {
        loader: (filepath) =>
          import(isCJS ? filepath : url.pathToFileURL(filepath).href).then(
            (file) => file.default,
          ),
        pattern: /\.js$/,
      })
+
+     this._rawCache = new ResponseCache(
+       (raw: string) => this.raw(raw),
+       config.caching ?? Infinity,
+     )
    }

    get cachedTables() {
@@ -79,7 +100,7 @@ export class ORM {
    }

    hasCachedTable(name: string) {
-     return this.cachedTables.some((table) => table.options.name)
+     return this.cachedTables.some((table) => table.options.name === name)
    }

    async hasTable(name: string): Promise<boolean> {
@@ -92,21 +113,19 @@ export class ORM {
    async init() {
      await this.handler.init()

-     try {
-       await this.database.raw("PRAGMA foreign_keys = ON;")
-     } catch (error) {}
-
-     const migration = new Table<MigrationData>({
-       name: "migration",
-       priority: Infinity,
-       setup: (table) => {
-         table.string("table").unique().notNullable()
-         table.integer("version").notNullable()
-       },
-     })
+     await enableForeignKeys(this)

-     migration.orm = this
-     await migration.make()
+     this.handler.elements.set(
+       "migration",
+       new Table<MigrationData>({
+         name: "migration",
+         priority: Infinity,
+         setup: (table) => {
+           table.string("table").unique().notNullable()
+           table.integer("version").notNullable()
+         },
+       }),
+     )

      for (const table of this.cachedTables.sort(
        (a, b) => (b.options.priority ?? 0) - (a.options.priority ?? 0),
@@ -114,82 +133,59 @@ export class ORM {
        table.orm = this
        await table.make()
      }
+
+     this._ready = true
    }

    raw(sql: Knex.Value): Knex.Raw {
+     if (this._ready) this.cache.invalidate()
      return this.database.raw(sql)
    }

-   // /**
-   //  * Extract the database to a CSV file for each table.
-   //  */
-   // async createBackup(dir = defaultBackupDir) {
-   //   const tables = [...this.handler.elements.values()]
-   //
-   //   for (const table of tables) {
-   //     await this.database
-   //       .select()
-   //       .from(table.options.name)
-   //       .then(async (rows) => {
-   //         const csv = rows.map((row) => Object.values(row).join(",")).join("\n")
-   //
-   //         return fs.promises.writeFile(
-   //           path.join(dir, `${table.options.name}.csv`),
-   //           csv,
-   //           "utf8",
-   //         )
-   //       })
-   //   }
-   // }
-   //
-   // /**
-   //  * Import a CSV file for each table to the database.
-   //  */
-   // async restoreBackup(dir = defaultBackupDir) {
-   //   const tables = [...this.handler.elements.values()].sort(
-   //     (a, b) => (b.options.priority ?? 0) - (a.options.priority ?? 0),
-   //   )
-   //
-   //   for (const table of tables) {
-   //     const columnInfo = await table.getColumns()
-   //
-   //     let csv: string
-   //
-   //     try {
-   //       csv = await fs.promises.readFile(
-   //         path.join(dir, `${table.options.name}.csv`),
-   //         "utf8",
-   //       )
-   //     } catch (error) {
-   //       this.config.logger?.warn(
-   //         `missing backup file for table ${chalk[
-   //           this.config.loggerColors?.highlight ?? "blueBright"
-   //         ](table.options.name)}`,
-   //       )
-   //
-   //       continue
-   //     }
-   //
-   //     if (csv.trim().length === 0) continue
-   //
-   //     const rows = csv
-   //       .split("\n")
-   //       .map((row) => row.split(","))
-   //       .map((row) => {
-   //         const data: any = {}
-   //
-   //         let index = 0
-   //
-   //         for (const [name, info] of Object.entries(columnInfo)) {
-   //           data[name] =
-   //             info.type === "integer" ? Number(row[index]) : row[index]
-   //           index++
-   //         }
-   //
-   //         return data
-   //       })
-   //
-   //     await this.database(table.options.name).insert(rows)
-   //   }
-   // }
+   cache = {
+     raw: (sql: string, anyDataUpdated?: boolean): Knex.Raw => {
+       if (anyDataUpdated) this.cache.invalidate()
+       return this._rawCache!.get(sql, sql)
+     },
+     invalidate: () => {
+       this._rawCache.invalidate()
+       this.cachedTables.forEach((table) => table.cache.invalidate())
+     },
+   }
+
+   /**
+    * Create a backup of the database. <br>
+    * The backup will be saved in the location specified in the config.
+    */
+   async createBackup(dirname?: string) {
+     try {
+       for (let table of this.cachedTables) {
+         await backupTable(table, dirname)
+       }
+
+       console.log("Database backup created.")
+     } catch (error) {
+       console.error("Error while creating backup of the database.", error)
+     }
+   }
+
+   /**
+    * Restore the database from the backup. <br>
+    * @warning This will delete all the data in the tables.
+    */
+   async restoreBackup(dirname?: string) {
+     try {
+       await disableForeignKeys(this)
+
+       for (let table of this.cachedTables) {
+         await restoreBackup(table, dirname)
+       }
+
+       await enableForeignKeys(this)
+
+       console.log("Database restored from backup.")
+     } catch (error) {
+       console.error("Error while restoring backup of the database.", error)
+     }
+   }
  }
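
Note (not part of the published diff): for users upgrading from 1.7.x, a hedged sketch of what an equivalent 1.8.0 configuration might look like after these changes (location renamed to tableLocation, loggerColors replaced by loggerStyles, new backups and caching options). The paths, chunk size, and style names below are illustrative assumptions, not package defaults:

  const orm = new ORM({
    tableLocation: "./dist/tables", // was `location` in 1.7.x
    loggerStyles: {
      // node:util styleText names instead of Chalk color names
      highlight: "blueBright",
      rawValue: "magenta",
      description: "gray",
    },
    backups: { location: "./backup", chunkSize: 10 * 1024 * 1024 }, // assumed values
    caching: 60_000, // cache raw queries for one minute (defaults to Infinity)
  })

  await orm.init()
  await orm.createBackup()     // one set of CSV chunk files per cached table
  // await orm.restoreBackup() // caution: truncates every table before re-inserting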