@vida-global/core 1.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +9 -0
- package/index.js +17 -0
- package/lib/active_record/README.md +205 -0
- package/lib/active_record/baseRecord.js +112 -0
- package/lib/active_record/db/connection.js +128 -0
- package/lib/active_record/db/connectionConfiguration.js +114 -0
- package/lib/active_record/db/importSchema.js +4 -0
- package/lib/active_record/db/migration.js +132 -0
- package/lib/active_record/db/migrationTemplate.js +8 -0
- package/lib/active_record/db/migrationVersion.js +68 -0
- package/lib/active_record/db/migrator.js +169 -0
- package/lib/active_record/db/queryInterface.js +47 -0
- package/lib/active_record/db/schema.js +113 -0
- package/lib/active_record/index.js +6 -0
- package/lib/active_record/utils.js +43 -0
- package/lib/http/README.md +32 -0
- package/lib/http/client.js +129 -0
- package/lib/http/error.js +34 -0
- package/lib/logger/README.md +2 -0
- package/lib/logger/index.js +16 -0
- package/lib/release/develop.js +27 -0
- package/lib/release/git.js +86 -0
- package/lib/release/increment.js +56 -0
- package/lib/release/index.js +10 -0
- package/lib/release/release.js +30 -0
- package/lib/release/utils.js +44 -0
- package/lib/server/README.md +37 -0
- package/lib/server/index.js +9 -0
- package/lib/server/server.js +359 -0
- package/lib/server/serverController.js +344 -0
- package/lib/server/systemController.js +23 -0
- package/package.json +37 -0
- package/scripts/active_record/migrate.js +30 -0
- package/scripts/release.js +62 -0
- package/test/active_record/baseRecord.test.js +179 -0
- package/test/active_record/db/connection.test.js +221 -0
- package/test/active_record/db/connectionConfiguration.test.js +184 -0
- package/test/active_record/db/migrator.test.js +266 -0
- package/test/active_record/db/queryInterface.test.js +66 -0
- package/test/http/client.test.js +271 -0
- package/test/http/error.test.js +71 -0
- package/test/release/develop.test.js +57 -0
- package/test/release/git.test.js +189 -0
- package/test/release/increment.test.js +145 -0
- package/test/release/release.test.js +72 -0
- package/test/release/utils.test.js +148 -0
- package/test/server/helpers/controllers/barController.js +9 -0
- package/test/server/helpers/controllers/fooController.js +48 -0
- package/test/server/helpers/controllers/sub/bazController.js +10 -0
- package/test/server/helpers/server.js +14 -0
- package/test/server/server.test.js +188 -0
- package/test/server/serverController.test.js +251 -0

package/lib/active_record/db/migration.js
@@ -0,0 +1,132 @@
+const { Connection } = require('./connection');
+const fs = require('node:fs');
+const { generateActiveRecordSchemaFiles } = require('./schema');
+const { logger } = require('../../logger');
+const { Migrator } = require('./migrator');
+const utils = require('../utils');
+const versioning = require('./migrationVersion');
+
+
+/***********************************************************************************************
+ * MIGRATION UP
+ ***********************************************************************************************/
+async function runMigrations() {
+  const migrations = await migrationsToRun();
+  for (const migration of migrations) {
+    await runMigration(migration);
+  }
+
+  await generateActiveRecordSchemaFiles();
+
+  Connection.closeAll();
+}
+
+
+async function runMigration(migration) {
+  const migrationStr = migration.fileName.replace(/\.js$/, '');
+  logger.info(`Running migration ${migrationStr}`);
+
+  await Migrator.runMigration(migration);
+
+  await versioning.markVersionAsPerformed(migration);
+  logger.info(`Completed migration ${migrationStr}`);
+}
+
+
+async function migrationsToRun() {
+  const allMigrations = getMigrations();
+  const migrationsToRun = [];
+
+  for (const migration of allMigrations) {
+    if (await versioning.migrationHasRun(migration)) continue;
+    migrationsToRun.push(migration);
+  }
+
+  return migrationsToRun;
+}
+
+
+/***********************************************************************************************
+ * MIGRATION DOWN
+ ***********************************************************************************************/
+async function rollbackMigration() {
+  const migration = await migrationToRollback();
+  if (!migration) {
+    logger.info(`No migration to rollback`);
+  } else {
+    const migrationStr = migration.fileName.replace(/\.js$/, '');
+    logger.info(`Rolling back migration ${migrationStr}`);
+
+    await Migrator.rollbackMigration(migration);
+
+    await versioning.markVersionAsNotPerformed(migration);
+    logger.info(`Rolled back migration ${migrationStr}`);
+  }
+
+  await generateActiveRecordSchemaFiles();
+
+  Connection.closeAll();
+}
+
+
+async function migrationToRollback() {
+  const allMigrations = getMigrations().reverse();
+
+  for (const migration of allMigrations) {
+    if (await versioning.migrationHasRun(migration)) return migration;
+  }
+
+  return null;
+}
+
+
+/***********************************************************************************************
+ * MIGRATION CREATION
+ ***********************************************************************************************/
+function generateMigrationFile(description, databaseId) {
+  utils.ensureMigrationsDirectory();
+
+  description = utils.toCamelCase(description);
+  const fileName = fileNameForMigration(description, databaseId);
+  const filePath = `${utils.migrationsDirectory}/${fileName}`;
+  const template = require('./migrationTemplate');
+  fs.writeFile(filePath, template, err => {
+    if (err) {
+      logger.error(`Unable to write new migration file to ${filePath}`);
+    } else {
+      logger.info(`New migration file written to ${filePath}`);
+    }
+  });
+}
+
+
+function fileNameForMigration(description, databaseId) {
+  const name = `${description}_${new Date().getTime()}`;
+  const dbSuffix = databaseId ? `.${databaseId}` : '';
+  return `${name}${dbSuffix}.js`;
+}
+
+
+/***********************************************************************************************
+ * UTILS
+ ***********************************************************************************************/
+function getMigrations() {
+  const files = fs.readdirSync(utils.migrationsDirectory);
+  const migrations = files.map(f => {
+    const m = f.match(/_(?<version>\d+)(\.(?<databaseId>\w+))?\.js$/);
+    if (!m) return;
+    return {fileName: f, databaseId: m.groups.databaseId, version: m.groups.version};
+  });
+
+  return migrations.filter(item => item)
+    .sort((m1, m2) => {
+      return parseInt(m1.version) - parseInt(m2.version);
+    });
+}
+
+
+module.exports = {
+  generateMigrationFile,
+  rollbackMigration,
+  runMigrations
+}
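
To make the file-naming round trip above concrete, here is a small editorial illustration of how `generateMigrationFile` and `getMigrations` fit together. The call, the example description, the `primary` database id, and the relative require path are invented for the sketch and are not part of the package:

```
// Illustration only (not shipped in the package); the require path is hypothetical.
const { generateMigrationFile } = require('./migration');

// utils.toCamelCase('add users table')  -> 'addUsersTable'
// fileNameForMigration then appends the current epoch-millis timestamp and the
// optional database suffix, so this writes a file named something like
//   config/db/migrations/addUsersTable_1712345678901.primary.js
// filled with the contents of migrationTemplate.js (not shown in this hunk).
generateMigrationFile('add users table', 'primary');

// getMigrations() later recovers { fileName, databaseId, version } from that
// name via /_(?<version>\d+)(\.(?<databaseId>\w+))?\.js$/ and sorts by version.
```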

package/lib/active_record/db/migrationVersion.js
@@ -0,0 +1,68 @@
+const { Connection } = require('./connection');
+const { DataTypes } = require('sequelize');
+
+
+const performedVersions = {};
+const models = {};
+
+
+async function migrationHasRun({ version, databaseId }) {
+  const performedVersions = await performedVersionsForDatabase(databaseId);
+  return performedVersions.has(version)
+}
+
+
+async function markVersionAsPerformed({ version, databaseId }) {
+  const model = await versionModelForDatabase(databaseId);
+  await model.create({ version });
+}
+
+
+async function markVersionAsNotPerformed({ version, databaseId }) {
+  const model = await versionModelForDatabase(databaseId);
+  await model.destroy({where: { version }});
+}
+
+
+async function performedVersionsForDatabase(databaseId) {
+  if (performedVersions[databaseId]) return performedVersions[databaseId];
+
+  const model = await versionModelForDatabase(databaseId);
+  const records = await model.findAll()
+  const versions = new Set(records.map(({ version }) => version));
+  performedVersions[databaseId] = versions;
+
+  return performedVersions[databaseId];
+}
+
+
+async function versionModelForDatabase(databaseId) {
+  if (models[databaseId]) return models[databaseId];
+
+  const model = createVersionModelForDatabase(databaseId);
+  models[databaseId] = model;
+  await model.sync();
+
+  return model;
+};
+
+
+function createVersionModelForDatabase(databaseId) {
+  const connection = new Connection(databaseId);
+  return connection._sequelize.define('MigrationVersion', {
+    version: {
+      type: DataTypes.STRING,
+      primaryKey: true
+    }
+
+  }, {
+    tableName: 'migration_versions'
+  });
+}
+
+
+module.exports = {
+  markVersionAsNotPerformed,
+  markVersionAsPerformed,
+  migrationHasRun
+};

package/lib/active_record/db/migrator.js
@@ -0,0 +1,169 @@
+const { Connection } = require('./connection');
+const utils = require('../utils');
+const { Sequelize, Op } = require('sequelize');
+const { underscore } = require('inflection');
+
+
+class Migrator {
+  #connection;
+  #fileName;
+  #migrationModule;
+
+  DataTypes = Sequelize.DataTypes;
+  Operators = Op;
+
+
+  constructor(fileName, databaseId) {
+    this.#connection = new Connection(databaseId);
+    this.#fileName = fileName;
+  }
+
+
+  async run() {
+    if (!this.migrationModule.up) throw new Error('Missing migration `up`');
+
+    const migrator = this;
+    await this.#connection.queryInterface.transaction(async () => {
+      await migrator.migrationModule.up.call(migrator);
+    });
+  }
+
+
+  async rollback() {
+    if (!this.migrationModule.down) return;
+
+    const migrator = this;
+    await this.#connection.queryInterface.transaction(async () => {
+      await migrator.migrationModule.down.call(migrator);
+    });
+  }
+
+
+  static async runMigration({ fileName, databaseId }) {
+    await (new Migrator(fileName, databaseId).run());
+  }
+
+
+  static async rollbackMigration({ fileName, databaseId }) {
+    await (new Migrator(fileName, databaseId).rollback());
+  }
+
+
+  get migrationModule() {
+    if (!this.#migrationModule) {
+      this.#migrationModule = this.importMigrationModule();
+    }
+
+    return {...this.#migrationModule};
+  }
+
+
+  importMigrationModule() {
+    const filePath = `${utils.migrationsDirectory}/${this.#fileName}`;
+    return require(filePath);
+  }
+
+
+  /***********************************************************************************************
+   * MIGRATION ACTIONS
+   ***********************************************************************************************/
+  async createTable(tableName, details, options={}) {
+    details = {...this.defaultIdColumn, ...details};
+    if (options.timestamps !== false) {
+      details = {...details, ...this.defaultTimestampColumns};
+    }
+
+    details = this.normalizeTableDetails(details);
+    await this.#sequelizeQueryInterface.createTable(tableName, details);
+  }
+
+
+  normalizeTableDetails(details) {
+    const normalized = {};
+
+    for (const [colName, colDetails] of Object.entries(details)) {
+      const snakedColName = underscore(colName);
+      normalized[snakedColName] = colDetails;
+    }
+
+    return normalized;
+  }
+
+
+  get defaultIdColumn() {
+    return {
+      id: {
+        allowNull: false,
+        autoIncrement: true,
+        primaryKey: true,
+        type: this.DataTypes.INTEGER, }
+    };
+  }
+
+
+  get defaultTimestampColumns() {
+    return {
+      created_at: {
+        allowNull: false,
+        type: this.DataTypes.DATE },
+      updated_at: {
+        allowNull: false,
+        type: this.DataTypes.DATE }
+    };
+  }
+
+
+  async dropTable(tableName) {
+    await this.#sequelizeQueryInterface.dropTable(tableName);
+  }
+
+
+  async addColumn(tableName, columnName, columnDetails) {
+    await this.#sequelizeQueryInterface.addColumn(tableName, columnName, columnDetails);
+  }
+
+
+  async removeColumn(tableName, columnName) {
+    await this.#sequelizeQueryInterface.removeColumn(tableName, columnName);
+  }
+
+
+  async addIndex(tableName, fields, { concurrently, unique, name, where }) {
+    const options = {fields: fields};
+    if (concurrently) options.concurrently = true;
+    if (unique) options.unique = true;
+    if (name) options.name = name;
+    if (where) options.where = where;
+
+    await this.#sequelizeQueryInterface.addIndex(tableName, options);
+  }
+
+
+  async removeIndex(tableName, indexNameOrAttributes, concurrently=false) {
+    const options = {};
+    if (concurrently) options.concurrently = true;
+    await this.#sequelizeQueryInterface.removeIndex(tableName, indexNameOrAttributes, options);
+  }
+
+
+  async renameColumn(tableName, oldName, newName) {
+    newName = underscore(newName);
+    await this.#sequelizeQueryInterface.renameColumn(tableName, oldName, newName);
+  }
+
+
+  async changeColumn(tableName, columnName, dataTypeOrOptions) {
+    await this.#sequelizeQueryInterface.changeColumn(tableName, columnName, dataTypeOrOptions);
+  }
+
+
+  get #sequelizeQueryInterface() {
+    return this.#connection._sequelize.getQueryInterface();
+  }
+}
+
+
+
+module.exports = {
+  Migrator
+};
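
`Migrator.run()` and `rollback()` invoke a migration module's `up`/`down` with `this` bound to the Migrator instance, so a file under `config/db/migrations/` only needs to export those two functions and can reach the helpers above through `this`. A minimal editorial sketch follows; the file name, table, and columns are hypothetical, and the package's real migrationTemplate.js is not shown in this diff section:

```
// Hypothetical config/db/migrations/createUsers_1712345678901.js
module.exports = {
  async up() {
    // createTable merges in the default id column and, unless disabled via
    // options.timestamps === false, the created_at/updated_at columns.
    await this.createTable('users', {
      email: { type: this.DataTypes.STRING, allowNull: false }
    });
    await this.addIndex('users', ['email'], { unique: true });
  },

  async down() {
    await this.dropTable('users');
  }
};
```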

package/lib/active_record/db/queryInterface.js
@@ -0,0 +1,47 @@
+const { Op } = require('sequelize');
+
+class QueryInterface {
+  #connection;
+
+  Operators = Op;
+
+  constructor(connection) {
+    this.#connection = connection;
+  }
+
+
+  async allTables() {
+    const queryInterface = this.#sequelizeQueryInterface;
+    const tableNames = await queryInterface.showAllTables()
+    const tables = {};
+
+    for (const tableName of tableNames) {
+      const table = await (queryInterface.describeTable(tableName));
+      tables[tableName] = table;
+    }
+
+    return tables;
+  }
+
+
+  async transaction(callback) {
+    await this.#sequelize.transaction(callback);
+  }
+
+
+
+  get #sequelizeQueryInterface() {
+    return this.#sequelize.getQueryInterface();
+  }
+
+
+  get #sequelize() {
+    return this.#connection._sequelize;
+  }
+
+}
+
+
+module.exports = {
+  QueryInterface
+}

package/lib/active_record/db/schema.js
@@ -0,0 +1,113 @@
+const { Connection } = require('./connection');
+const { ConnectionConfiguration } = require('./connectionConfiguration');
+const fs = require('node:fs');
+const importSchema = require('./importSchema');
+const { logger } = require('../../logger');
+const { Sequelize } = require('sequelize');
+const utils = require('../utils');
+
+async function generateActiveRecordSchemaFiles() {
+  utils.ensureSchemasDirectory();
+
+  const databaseIds = ConnectionConfiguration.configuredDatabaseIds();
+  for (const databaseId of databaseIds) {
+    await generateSchemaFilesForDatabase(databaseId);
+  }
+};
+
+
+async function generateSchemaFilesForDatabase(databaseId) {
+  Connection.clearConnectionsCache();
+  const connection = new Connection(databaseId, {loggingEnabled: false});
+  const queryInterface = connection.queryInterface;
+  const tables = await queryInterface.allTables();
+
+  const filePath = filePathForSchemaFile(databaseId);
+  const schemas = `module.exports = ${JSON.stringify(tables, null, 4)};`;
+
+  fs.writeFile(filePath, schemas, err => {
+    if (err) {
+      logger.error(`Unable to write new schema file to ${filePath}`);
+    } else {
+      logger.info(`New schemas file written to ${filePath}`);
+    }
+  });
+}
+
+
+function getActiveRecordSchema(tableName, databaseId, dialect) {
+  const filePath = filePathForSchemaFile(databaseId);
+  const schema = importSchema(filePath, tableName);
+  if (!schema) throw new Error(`No schema found for table ${databaseId}.${tableName}`);
+
+  cleanSchema(schema, dialect);
+
+  return schema;
+}
+
+
+function cleanSchema(schema, dialect) {
+  for (const [col, details] of Object.entries(schema)) {
+    const dataType = dataTypeForColumn(details, dialect);
+    if (dataType) details.type = dataType;
+    resolveAutoIncrementColumns(details);
+    resolveUUIDColumns(details);
+  }
+}
+
+function resolveAutoIncrementColumns(details) {
+  if (!details.defaultValue) return;
+
+  const regExp = /^nextval\(\w+_seq::regclass\)$/;
+  if (details.defaultValue.match(regExp)) {
+    delete details.defaultValue;
+    delete details.allowNull;
+    details.autoIncrement = true;
+  }
+}
+
+
+function dataTypeForColumn(details, dialect) {
+  const typeStr = details.type.replace(/\(.*\)/, '').toUpperCase();
+
+  const typesForDialect = Object.values(Sequelize.DataTypes).filter(val => val.types && val.types[dialect]);
+  for (const dataType of typesForDialect) {
+    if (dataType.key == typeStr) return dataType;
+    let typeStrs = dataType.types[dialect];
+    if (!typeStrs) typeStrs = [];
+    if (!Array.isArray(typeStrs)) {
+      typeStrs = Object.values(typeStrs).map(map => Object.values(map)).flat();
+    }
+    typeStrs = typeStrs.filter(Boolean);
+    typeStrs = typeStrs.map(aTypeStr => aTypeStr.toUpperCase());
+    if (typeStrs.includes(typeStr)) return Sequelize.DataTypes[dataType.key];
+  }
+
+  const typesByDialect = Object.values(Sequelize.DataTypes[dialect]);
+  for (const dataType of typesByDialect) {
+    if (dataType.key == typeStr) return dataType;
+    try {
+      const aTypeStr = dataType.prototype.toSql().replace(/\(.*\)/, '').toUpperCase();
+      if (aTypeStr == typeStr) return Sequelize.DataTypes[dataType.key];
+    } catch(err) {}
+  }
+
+  if (typeStr.includes('CHAR')) return Sequelize.DataTypes.STRING;
+}
+
+
+function resolveUUIDColumns(details) {
+  if (details.type != 'UUID' || !details.primaryKey) return;
+  details.defaultValue = Sequelize.UUIDV4;
+}
+
+
+function filePathForSchemaFile(databaseId) {
+  return `${utils.schemasDirectory}/${databaseId}.js`;
+}
+
+
+module.exports = {
+  generateActiveRecordSchemaFiles,
+  getActiveRecordSchema
+};
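
For reference, `generateSchemaFilesForDatabase` serializes the result of `queryInterface.allTables()` (one `describeTable` result per table) into `config/db/schemas/<databaseId>.js`. A hypothetical generated file might look roughly like the sketch below; the actual column descriptors come from Sequelize's `describeTable` and vary by dialect, so this is illustrative only:

```
// Hypothetical config/db/schemas/primary.js — not actual package output
module.exports = {
    "users": {
        "id": {
            "type": "INTEGER",
            "allowNull": false,
            "primaryKey": true,
            "defaultValue": "nextval(users_id_seq::regclass)"
        },
        "email": {
            "type": "CHARACTER VARYING(255)",
            "allowNull": false,
            "primaryKey": false
        }
    }
};
```

`getActiveRecordSchema` then reads such a file back and, via `cleanSchema`, maps the raw type strings onto Sequelize `DataTypes`, converts `nextval(..._seq::regclass)` defaults into `autoIncrement: true`, and gives UUID primary keys a `UUIDV4` default.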

package/lib/active_record/utils.js
@@ -0,0 +1,43 @@
+const fs = require('node:fs');
+
+
+const configDirectory = `${process.cwd()}/config/db`;
+const migrationsDirectory = `${configDirectory}/migrations`;
+const schemasDirectory = `${configDirectory}/schemas`;
+
+
+function ensureConfigDirectory() {
+  fs.mkdirSync(configDirectory, { recursive: true });
+}
+
+
+function ensureMigrationsDirectory() {
+  fs.mkdirSync(migrationsDirectory, { recursive: true });
+}
+
+
+function ensureSchemasDirectory() {
+  fs.mkdirSync(schemasDirectory, { recursive: true });
+}
+
+
+function toCamelCase(str) {
+  return str.split(/[-_\s]/).map((word, index) => {
+    if (index === 0) return word.toLowerCase();
+    return (
+      word.charAt(0).toUpperCase() +
+      word.slice(1).toLowerCase()
+    );
+  }).join("");
+}
+
+
+module.exports = {
+  configDirectory,
+  ensureConfigDirectory,
+  ensureMigrationsDirectory,
+  ensureSchemasDirectory,
+  migrationsDirectory,
+  schemasDirectory,
+  toCamelCase
+}

package/lib/http/README.md
@@ -0,0 +1,32 @@
+# HttpClient
+The `HttpClient` is a generic client for making HTTP requests. It can be used on its own, but is best suited for creating subclasses intended for specific APIs. By overriding the `urlRoot` and `defaultHeaders` methods, an API client can be made that reduces duplicated code.
+```
+class MyApiClient extends HttpClient {
+  constructor({dev, token}) {
+    this.#token = token;
+    this.#dev = dev;
+  }
+
+  get defaultHeaders() {
+    const headers = super.defaultHeaders;
+    headers.Authorization = `Bearer ${this.#token}`;
+    return headers;
+  }
+
+  get urlRoot() {
+    if (this.#dev) {
+      return "https://staging.foo.com";
+    } else {
+      return "https://foo.com";
+    }
+  }
+}
+
+const client = new MyApiClient(true, '123abctoken');
+const requestParameters = {baz: 1, ban: 2};
+const response = await client.get("/foo/bar", {requestParameters});
+
+const requestBody = {baz: 1, ban: 2};
+const response = await client.post("/foo/bar", {requestBody});
+```
+
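
An editorial note on the README example above, as published: the subclass constructor uses `this` before calling `super()`, the private fields `#token`/`#dev` are never declared, and the client is later constructed with positional arguments even though the constructor destructures an options object. A corrected sketch follows, assuming `HttpClient`'s own constructor takes no required arguments (its definition in `lib/http/client.js` is not shown in this section):

```
class MyApiClient extends HttpClient {
  #token;
  #dev;

  constructor({dev, token}) {
    super();               // assumption: HttpClient's constructor needs no arguments
    this.#token = token;
    this.#dev = dev;
  }

  // defaultHeaders and urlRoot overrides as in the README example above
}

const client = new MyApiClient({dev: true, token: '123abctoken'});
```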