@dbcube/schema-builder 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.npmignore +51 -0
- package/CONTRIBUTING.md +9 -0
- package/LICENSE +21 -0
- package/README.md +134 -0
- package/dist/index.cjs +375 -0
- package/dist/index.cjs.map +1 -0
- package/dist/index.d.mts +16 -0
- package/dist/index.d.ts +16 -0
- package/dist/index.js +338 -0
- package/dist/index.js.map +1 -0
- package/package.json +69 -0
- package/tsup.config.ts +14 -0
package/.npmignore
ADDED
|
@@ -0,0 +1,51 @@
|
|
|
1
|
+
# Directories
|
|
2
|
+
examples
|
|
3
|
+
|
|
4
|
+
# Ignorar dependencias y configuraciones de desarrollo
|
|
5
|
+
node_modules/
|
|
6
|
+
npm-debug.log*
|
|
7
|
+
yarn-debug.log*
|
|
8
|
+
yarn-error.log*
|
|
9
|
+
|
|
10
|
+
# Ignorar carpetas y archivos irrelevantes
|
|
11
|
+
.vscode/
|
|
12
|
+
.lh
|
|
13
|
+
.idea/
|
|
14
|
+
.DS_Store
|
|
15
|
+
Thumbs.db
|
|
16
|
+
*.log
|
|
17
|
+
|
|
18
|
+
# Ignorar configuraciones del proyecto
|
|
19
|
+
.env
|
|
20
|
+
.env.*.local
|
|
21
|
+
package-lock.json
|
|
22
|
+
|
|
23
|
+
# Ignorar archivos del sistema
|
|
24
|
+
*.swp
|
|
25
|
+
*.swo
|
|
26
|
+
*.tmp
|
|
27
|
+
*.temp
|
|
28
|
+
|
|
29
|
+
# Ignorar carpetas de trabajo
|
|
30
|
+
temp/
|
|
31
|
+
logs/
|
|
32
|
+
debug/
|
|
33
|
+
|
|
34
|
+
# Ignorar archivos de compilación
|
|
35
|
+
src/
|
|
36
|
+
tsconfig.json
|
|
37
|
+
tsconfig.tsbuildinfo
|
|
38
|
+
|
|
39
|
+
# Ignorar pruebas y configuraciones
|
|
40
|
+
tests/
|
|
41
|
+
__tests__/
|
|
42
|
+
__mocks__/
|
|
43
|
+
coverage/
|
|
44
|
+
jest.config.js
|
|
45
|
+
|
|
46
|
+
# Ignorar documentación o ejemplos no necesarios
|
|
47
|
+
docs/
|
|
48
|
+
examples/
|
|
49
|
+
|
|
50
|
+
# Asegurarse de incluir solo lo esencial
|
|
51
|
+
!.npmignore
|
package/CONTRIBUTING.md
ADDED
|
@@ -0,0 +1,9 @@
|
|
|
1
|
+
## Colaboración
|
|
2
|
+
|
|
3
|
+
Si deseas contribuir a este proyecto, sigue estos pasos:
|
|
4
|
+
|
|
5
|
+
1. Haz un fork del repositorio.
|
|
6
|
+
2. Crea una nueva rama (`git checkout -b feature/nueva-caracteristica`).
|
|
7
|
+
3. Realiza tus cambios y haz commit de ellos (`git commit -am 'Añadir nueva característica'`).
|
|
8
|
+
4. Sube tu rama (`git push origin feature/nueva-caracteristica`).
|
|
9
|
+
5. Abre un Pull Request.
|
package/LICENSE
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2025 Albert Araya - Dbcube
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
package/README.md
ADDED
|
@@ -0,0 +1,134 @@
|
|
|
1
|
+
# schema-builder
|
|
2
|
+
|
|
3
|
+
The DBCube Query Builder is a lightweight, flexible, and fluent library for building queries across multiple database engines, including MySQL, PostgreSQL, SQLite, and MongoDB, using JavaScript/Node.js.
|
|
4
|
+
|
|
5
|
+
Its agnostic design allows you to generate data manipulation (DML) and data definition (DDL) operations with a clean, chainable syntax—without sacrificing power or expressiveness.
|
|
6
|
+
|
|
7
|
+
It’s designed to work seamlessly in both SQL and NoSQL environments, providing a consistent abstraction layer across different storage technologies while still leveraging the native capabilities of each engine.
|
|
8
|
+
|
|
9
|
+
## Features
|
|
10
|
+
|
|
11
|
+
- **Fluent API** for building SQL queries
|
|
12
|
+
- **Type-safe** query construction
|
|
13
|
+
- **Support for SELECT, INSERT, UPDATE, DELETE**
|
|
14
|
+
- **Advanced WHERE conditions** (AND, OR, groups, BETWEEN, IN, NULL checks)
|
|
15
|
+
- **JOINs**: INNER, LEFT, RIGHT
|
|
16
|
+
- **Aggregations**: COUNT, SUM, AVG, MAX, MIN
|
|
17
|
+
- **Ordering, Grouping, Distinct, Pagination**
|
|
18
|
+
- **Column management** (future extension)
|
|
19
|
+
- **Promise-based asynchronous API**
|
|
20
|
+
- **Singleton connection management**
|
|
21
|
+
|
|
22
|
+
## Installation
|
|
23
|
+
|
|
24
|
+
```bash
|
|
25
|
+
npm install @dbcube/schema-builder
|
|
26
|
+
```
|
|
27
|
+
|
|
28
|
+
## Quick Start
|
|
29
|
+
|
|
30
|
+
```typescript
|
|
31
|
+
import Database from "@dbcube/query-builder";
|
|
32
|
+
|
|
33
|
+
const db = new Database("my_database");
|
|
34
|
+
|
|
35
|
+
// Select all users
|
|
36
|
+
const users = await db.table("users").get();
|
|
37
|
+
|
|
38
|
+
// Select users with conditions
|
|
39
|
+
const activeUsers = await db
|
|
40
|
+
.table("users")
|
|
41
|
+
.where("status", "=", "active")
|
|
42
|
+
.orderBy("created_at", "DESC")
|
|
43
|
+
.limit(10)
|
|
44
|
+
.get();
|
|
45
|
+
|
|
46
|
+
// Insert new users
|
|
47
|
+
await db
|
|
48
|
+
.table("users")
|
|
49
|
+
.insert([{ name: "John", email: "john@example.com", age: 30 }]);
|
|
50
|
+
|
|
51
|
+
// Update a user
|
|
52
|
+
await db.table("users").where("id", "=", 1).update({ status: "inactive" });
|
|
53
|
+
|
|
54
|
+
// Delete users
|
|
55
|
+
await db.table("users").where("status", "=", "deleted").delete();
|
|
56
|
+
```
|
|
57
|
+
|
|
58
|
+
## API Documentation
|
|
59
|
+
|
|
60
|
+
### Database
|
|
61
|
+
|
|
62
|
+
#### `new Database(name: string)`
|
|
63
|
+
|
|
64
|
+
Creates a new database connection instance.
|
|
65
|
+
|
|
66
|
+
#### `table(tableName: string): Table`
|
|
67
|
+
|
|
68
|
+
Returns a Table instance for building queries on the specified table.
|
|
69
|
+
|
|
70
|
+
### Table
|
|
71
|
+
|
|
72
|
+
#### Query Methods
|
|
73
|
+
|
|
74
|
+
- `select(fields?: string[])`: Specify columns to select.
|
|
75
|
+
- `where(column, operator, value)`: Add a WHERE condition.
|
|
76
|
+
- `orWhere(column, operator, value)`: Add an OR WHERE condition.
|
|
77
|
+
- `whereGroup(callback)`: Grouped WHERE conditions.
|
|
78
|
+
- `whereBetween(column, [min, max])`: WHERE BETWEEN condition.
|
|
79
|
+
- `whereIn(column, values)`: WHERE IN condition.
|
|
80
|
+
- `whereNull(column)`: WHERE IS NULL condition.
|
|
81
|
+
- `whereNotNull(column)`: WHERE IS NOT NULL condition.
|
|
82
|
+
- `join(table, column1, operator, column2)`: INNER JOIN.
|
|
83
|
+
- `leftJoin(table, column1, operator, column2)`: LEFT JOIN.
|
|
84
|
+
- `rightJoin(table, column1, operator, column2)`: RIGHT JOIN.
|
|
85
|
+
- `orderBy(column, direction)`: ORDER BY clause.
|
|
86
|
+
- `groupBy(column)`: GROUP BY clause.
|
|
87
|
+
- `distinct()`: DISTINCT clause.
|
|
88
|
+
- `count(column?)`: COUNT aggregation.
|
|
89
|
+
- `sum(column)`: SUM aggregation.
|
|
90
|
+
- `avg(column)`: AVG aggregation.
|
|
91
|
+
- `max(column)`: MAX aggregation.
|
|
92
|
+
- `min(column)`: MIN aggregation.
|
|
93
|
+
- `limit(number)`: LIMIT clause.
|
|
94
|
+
- `page(number)`: Pagination (requires limit).
|
|
95
|
+
|
|
96
|
+
#### Execution Methods
|
|
97
|
+
|
|
98
|
+
- `get()`: Execute and return all matching rows.
|
|
99
|
+
- `first()`: Execute and return the first matching row.
|
|
100
|
+
- `find(value, column?)`: Find a row by column value (default: id).
|
|
101
|
+
- `insert(data)`: Insert one or more rows.
|
|
102
|
+
- `update(data)`: Update rows matching the conditions.
|
|
103
|
+
- `delete()`: Delete rows matching the conditions.
|
|
104
|
+
|
|
105
|
+
## Example Usage
|
|
106
|
+
|
|
107
|
+
```typescript
|
|
108
|
+
// Complex query with joins, grouping, and aggregation
|
|
109
|
+
const results = await db
|
|
110
|
+
.table("orders")
|
|
111
|
+
.join("users", "orders.user_id", "=", "users.id")
|
|
112
|
+
.where("orders.status", "=", "completed")
|
|
113
|
+
.groupBy("users.country")
|
|
114
|
+
.sum("orders.total")
|
|
115
|
+
.orderBy("sum", "DESC")
|
|
116
|
+
.limit(5)
|
|
117
|
+
.get();
|
|
118
|
+
```
|
|
119
|
+
|
|
120
|
+
## Error Handling
|
|
121
|
+
|
|
122
|
+
All methods throw descriptive errors for invalid usage, such as missing WHERE conditions on update/delete, or invalid data types.
|
|
123
|
+
|
|
124
|
+
## License
|
|
125
|
+
|
|
126
|
+
This project is licensed under the MIT License.
|
|
127
|
+
|
|
128
|
+
## Contributing
|
|
129
|
+
|
|
130
|
+
Contributions are welcome! Please see [CONTRIBUTING.md](./CONTRIBUTING.md) for guidelines.
|
|
131
|
+
|
|
132
|
+
## About
|
|
133
|
+
|
|
134
|
+
dbcube-schema-builder is part of the dbcube ecosystem, designed to provide a robust and flexible schema building experience for modern Node.js applications.
|
package/dist/index.cjs
ADDED
|
@@ -0,0 +1,375 @@
|
|
|
1
|
+
"use strict";
// Bundler (esbuild-style) CommonJS <-> ESM interop helpers.
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getProtoOf = Object.getPrototypeOf;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Define lazy, enumerable getters on `target` for every entry in `all`.
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
// Copy `from`'s own properties onto `to` as live getters, skipping `except`
// and anything `to` already owns; enumerability of the source is preserved.
var __copyProps = (to, from, except, desc) => {
  if (from && (typeof from === "object" || typeof from === "function")) {
    for (let key of __getOwnPropNames(from)) {
      if (!__hasOwnProp.call(to, key) && key !== except) {
        desc = __getOwnPropDesc(from, key);
        __defProp(to, key, { get: () => from[key], enumerable: !desc || desc.enumerable });
      }
    }
  }
  return to;
};
// Wrap a CommonJS module so it can be consumed as an ES module namespace.
var __toESM = (mod, isNodeMode, target) => {
  target = mod != null ? __create(__getProtoOf(mod)) : {};
  // If the importer is in node compatibility mode, or the module is not an
  // ESM file converted to CommonJS (no "__esModule" marker), expose the
  // whole module.exports object as "default" for node compatibility.
  const shaped = isNodeMode || !mod || !mod.__esModule
    ? __defProp(target, "default", { value: mod, enumerable: true })
    : target;
  return __copyProps(shaped, mod);
};
// Mark a namespace object as an ES module and expose it for CommonJS.
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
|
29
|
+
|
|
30
|
+
// src/index.ts — assemble the public CommonJS export surface of the bundle.
// Getters are lazy, so `Schema` / `index_default` may be defined later in the file.
var index_exports = {};
__export(index_exports, {
  default: () => index_default,
  Schema: () => Schema
});
module.exports = __toCommonJS(index_exports);
|
|
37
|
+
|
|
38
|
+
// src/lib/Schema.ts
|
|
39
|
+
var import_fs = __toESM(require("fs"));
|
|
40
|
+
var import_core = require("@dbcube/core");
|
|
41
|
+
var import_path = __toESM(require("path"));
|
|
42
|
+
|
|
43
|
+
// src/lib/FileUtils.ts
|
|
44
|
+
var fs = __toESM(require("fs"));
|
|
45
|
+
var path = __toESM(require("path"));
|
|
46
|
+
// src/lib/FileUtils.ts — filesystem helpers for locating and ordering .cube files.
var FileUtils = class {
  /**
   * Asynchronously checks whether a path exists.
   * @param filePath - Path to the file (relative paths resolved against cwd).
   * @returns Promise resolving to true when the path is accessible, false otherwise.
   */
  static async fileExists(filePath) {
    return new Promise((resolve2) => {
      fs.access(path.resolve(filePath), fs.constants.F_OK, (err) => {
        resolve2(!err);
      });
    });
  }
  /**
   * Synchronously checks whether a path exists.
   * @param filePath - Path to the file (relative paths resolved against cwd).
   * @returns true when the path is accessible, false otherwise.
   */
  static fileExistsSync(filePath) {
    try {
      fs.accessSync(path.resolve(filePath), fs.constants.F_OK);
      return true;
    } catch {
      return false;
    }
  }
  /**
   * Extracts the database name from a `@database("name")` directive.
   * Accepts double, single, or no quotes around the name.
   * @param input - Text that may contain the directive.
   * @returns The captured name, or null when no directive is present.
   */
  static extractDatabaseName(input) {
    const match = input.match(/@database\(["']?([\w-]+)["']?\)/);
    return match ? match[1] : null;
  }
  /**
   * Recursively collects files ending with `suffix` under `dir`, ordered by
   * the numeric prefix of their basename (non-numeric basenames sort as 0).
   * @param dir - Base directory (relative or absolute).
   * @param suffix - File suffix to match (e.g. 'table.cube').
   * @returns Absolute paths of the matching files, numerically sorted.
   */
  static getCubeFilesRecursively(dir, suffix) {
    const baseDir = path.resolve(dir);
    const cubeFiles = [];
    function recurse(currentDir) {
      const entries = fs.readdirSync(currentDir, { withFileTypes: true });
      for (const entry of entries) {
        const fullPath = path.join(currentDir, entry.name);
        if (entry.isDirectory()) {
          recurse(fullPath);
        } else if (entry.isFile() && entry.name.endsWith(suffix)) {
          cubeFiles.push(fullPath);
        }
      }
    }
    recurse(baseDir);
    cubeFiles.sort((a, b) => {
      // Fix: always pass a radix — a bare parseInt can misread prefixed names;
      // use Number.isNaN to avoid the coercing global isNaN.
      const aNum = Number.parseInt(path.basename(a), 10);
      const bNum = Number.parseInt(path.basename(b), 10);
      return (Number.isNaN(aNum) ? 0 : aNum) - (Number.isNaN(bNum) ? 0 : bNum);
    });
    return cubeFiles;
  }
};
var FileUtils_default = FileUtils;
|
|
106
|
+
|
|
107
|
+
// src/lib/Schema.ts
/**
 * Orchestrates the dbcube schema engine: database creation, table
 * refresh/fresh migrations, seeders and triggers, driven by .cube files
 * found under <cwd>/dbcube/cubes.
 */
var Schema = class {
  name;
  engine;
  /**
   * @param name - Target database name, forwarded to the engine runner.
   */
  constructor(name) {
    this.name = name;
    this.engine = new import_core.Engine(name);
  }
  /**
   * Creates the database via the schema engine.
   * @returns The engine payload on success.
   */
  async createDatabase() {
    const projectRoot = import_path.default.resolve(process.cwd());
    const result = await this.engine.run("schema_engine", [
      "--action",
      "create_database",
      "--path",
      projectRoot
    ]);
    if (result.status != 200) {
      returnFormattedError(result.status, result.message);
    }
    return result.data;
  }
  /**
   * Parses *table.cube files in "refresh" mode, generates the queries and
   * executes them, persisting the CREATE statement for later diffing.
   * NOTE(review): returns from inside the loop, so only the first cube file
   * is processed — confirm this is intended.
   * @returns The execute payload for the first processed cube, or null.
   */
  async refreshTables() {
    const cubesDir = import_path.default.join(process.cwd(), "dbcube", "cubes");
    if (!import_fs.default.existsSync(cubesDir)) {
      throw new Error("\u274C The cubes folder does not exist");
    }
    const cubeFiles = FileUtils_default.getCubeFilesRecursively("dbcube", "table.cube");
    if (cubeFiles.length === 0) {
      throw new Error("\u274C There are no cubes to execute");
    }
    for (const cubeFile of cubeFiles) {
      const absolutePath = import_path.default.isAbsolute(cubeFile) ? cubeFile : import_path.default.join(cubesDir, cubeFile);
      if (!import_fs.default.statSync(absolutePath).isFile()) {
        continue;
      }
      // 1) Parse the cube schema into DML actions.
      const parsed = await this.engine.run("schema_engine", [
        "--action",
        "parse_table",
        "--mode",
        "refresh",
        "--schema-path",
        absolutePath
      ]);
      if (parsed.status != 200) {
        returnFormattedError(parsed.status, parsed.message);
      }
      // Strip raw and escaped newlines/tabs and collapse runs of spaces
      // before passing the actions back on the command line.
      const actionsJson = JSON.stringify(parsed.data.actions).replace(/[\r\n\t]/g, "").replace(/\\[rnt]/g, "").replace(/\s{2,}/g, " ");
      // 2) Generate the queries for those actions.
      const generated = await this.engine.run("schema_engine", [
        "--action",
        "generate",
        "--mode",
        "refresh",
        "--dml",
        actionsJson
      ]);
      if (generated.status != 200) {
        returnFormattedError(generated.status, generated.message);
      }
      delete generated.data.database_type;
      const queriesJson = JSON.stringify(generated.data);
      // 3) Execute the generated queries.
      const executed = await this.engine.run("schema_engine", [
        "--action",
        "execute",
        "--mode",
        "refresh",
        "--dml",
        queriesJson
      ]);
      if (executed.status != 200) {
        returnFormattedError(executed.status, executed.message);
      }
      const createQuery = generated.data.regular_queries.filter((q) => q.includes("CREATE"))[0];
      await import_core.TableProcessor.saveQuery(parsed.data.table, parsed.data.database, createQuery);
      return executed.data;
    }
    return null;
  }
  /**
   * Parses *table.cube files in "fresh" mode, converts the generated CREATE
   * into ALTER queries against the stored schema, and executes them.
   * NOTE(review): returns from inside the loop, so only the first cube file
   * is processed — confirm this is intended.
   * @returns The execute payload for the first processed cube, or null.
   */
  async freshTables() {
    const cubesDir = import_path.default.join(process.cwd(), "dbcube", "cubes");
    if (!import_fs.default.existsSync(cubesDir)) {
      throw new Error("\u274C The cubes folder does not exist");
    }
    const cubeFiles = FileUtils_default.getCubeFilesRecursively("dbcube", "table.cube");
    if (cubeFiles.length === 0) {
      throw new Error("\u274C There are no cubes to execute");
    }
    for (const cubeFile of cubeFiles) {
      const absolutePath = import_path.default.isAbsolute(cubeFile) ? cubeFile : import_path.default.join(cubesDir, cubeFile);
      if (!import_fs.default.statSync(absolutePath).isFile()) {
        continue;
      }
      // 1) Parse the cube schema into DML actions (fresh mode).
      const parsed = await this.engine.run("schema_engine", [
        "--action",
        "parse_table",
        "--schema-path",
        absolutePath,
        "--mode",
        "fresh"
      ]);
      if (parsed.status != 200) {
        returnFormattedError(parsed.status, parsed.message);
      }
      const actionsJson = JSON.stringify(parsed.data.actions).replace(/[\r\n\t]/g, "").replace(/\\[rnt]/g, "").replace(/\s{2,}/g, " ");
      // 2) Generate the queries for those actions.
      const generated = await this.engine.run("schema_engine", [
        "--action",
        "generate",
        "--dml",
        actionsJson
      ]);
      if (generated.status != 200) {
        returnFormattedError(generated.status, generated.message);
      }
      delete generated.data._type;
      // Capture the CREATE statement BEFORE regular_queries is replaced with
      // the ALTER queries below — order matters here.
      const createQuery = generated.data.regular_queries.filter((q) => q.includes("CREATE"))[0];
      if (generated.data.regular_queries.length > 0) {
        const alterQueries = await import_core.TableProcessor.generateAlterQueries(generated.data.regular_queries[0], parsed.data.motor, parsed.data.table, parsed.data.database);
        generated.data.regular_queries = alterQueries;
      }
      const queriesJson = JSON.stringify(generated.data);
      // 3) Execute the (possibly rewritten) queries.
      const executed = await this.engine.run("schema_engine", [
        "--action",
        "execute",
        "--mode",
        "fresh",
        "--dml",
        queriesJson
      ]);
      if (executed.status != 200) {
        returnFormattedError(executed.status, executed.message);
      }
      await import_core.TableProcessor.saveQuery(parsed.data.table, parsed.data.database, createQuery);
      return executed.data;
    }
    return null;
  }
  /**
   * Runs the engine's "seeder" action against each *seeder.cube file.
   * NOTE(review): returns after the first file — confirm intended.
   * @returns The engine payload for the first processed seeder, or null.
   */
  async executeSeeders() {
    const cubesDir = import_path.default.join(process.cwd(), "dbcube", "cubes");
    if (!import_fs.default.existsSync(cubesDir)) {
      throw new Error("\u274C The cubes folder does not exist");
    }
    const cubeFiles = FileUtils_default.getCubeFilesRecursively("dbcube", "seeder.cube");
    if (cubeFiles.length === 0) {
      throw new Error("\u274C There are no cubes to execute");
    }
    for (const cubeFile of cubeFiles) {
      const absolutePath = import_path.default.isAbsolute(cubeFile) ? cubeFile : import_path.default.join(cubesDir, cubeFile);
      if (!import_fs.default.statSync(absolutePath).isFile()) {
        continue;
      }
      const result = await this.engine.run("schema_engine", [
        "--action",
        "seeder",
        "--schema-path",
        absolutePath
      ]);
      if (result.status != 200) {
        returnFormattedError(result.status, result.message);
      }
      return result.data;
    }
    return null;
  }
  /**
   * Runs the engine's "trigger" action against each *trigger.cube file,
   * emitting output into <cwd>/dbcube/triggers.
   * NOTE(review): returns after the first file — confirm intended.
   * @returns The engine payload for the first processed trigger, or null.
   */
  async executeTriggers() {
    const cubesDir = import_path.default.join(process.cwd(), "dbcube", "cubes");
    const triggersDirExit = import_path.default.join(process.cwd(), "dbcube", "triggers");
    if (!import_fs.default.existsSync(cubesDir)) {
      throw new Error("\u274C The cubes folder does not exist");
    }
    const cubeFiles = FileUtils_default.getCubeFilesRecursively("dbcube", "trigger.cube");
    if (cubeFiles.length === 0) {
      throw new Error("\u274C There are no cubes to execute");
    }
    for (const cubeFile of cubeFiles) {
      const absolutePath = import_path.default.isAbsolute(cubeFile) ? cubeFile : import_path.default.join(cubesDir, cubeFile);
      if (!import_fs.default.statSync(absolutePath).isFile()) {
        continue;
      }
      const result = await this.engine.run("schema_engine", [
        "--action",
        "trigger",
        "--path-exit",
        triggersDirExit,
        "--schema-path",
        absolutePath
      ]);
      if (result.status != 200) {
        returnFormattedError(result.status, result.message);
      }
      return result.data;
    }
    return null;
  }
};
|
|
305
|
+
/**
 * Prints a colorized error report to stderr and terminates the process.
 * Splits out an optional "[help]" section from the message, and tries to
 * locate and quote the offending source lines from this process's own stack.
 * @param {number} status - Engine status code (600 renders as a warning color).
 * @param {string} message - Error text, optionally containing "[help]".
 */
function returnFormattedError(status, message) {
  // ANSI escape sequences used to build the report.
  const RESET = "\x1B[0m";
  const RED = "\x1B[31m";
  const YELLOW = "\x1B[33m";
  const BOLD = "\x1B[1m";
  const CYAN = "\x1B[36m";
  const GRAY = "\x1B[90m";
  const UNDERLINE = "\x1B[4m";
  const MAGENTA = "\x1B[35m";
  let output = "";
  let help = "";
  const color = status === 600 ? YELLOW : RED;
  if (message.includes("[help]")) {
    // Render the message and its help text as two separate sections.
    const parts = message.split("[help]");
    output += `\n${RED}${BOLD}${parts[0]}${RESET}`;
    help += `\n${MAGENTA}${BOLD}[help]${RESET} ${GRAY}${parts[1]}${RESET}\n`;
  } else {
    output += `\n${color}${BOLD}${message}${RESET}\n`;
  }
  // Walk our own stack to find the first user-land frame (.js outside node_modules).
  const probe = new Error();
  const stackLines = probe.stack?.split("\n") || [];
  const relevantStackLine = stackLines.find(
    (line) => line.includes(".js:") && !line.includes("node_modules")
  );
  if (relevantStackLine) {
    const match = relevantStackLine.match(/\((.*):(\d+):(\d+)\)/) || relevantStackLine.match(/at (.*):(\d+):(\d+)/);
    if (match) {
      const [, filePath, lineStr, columnStr] = match;
      const lineNum = parseInt(lineStr, 10);
      const errorLocation = `${filePath}:${lineStr}:${columnStr}`;
      try {
        // Quote a small window of source around the offending line.
        const codeLines = import_fs.default.readFileSync(filePath, "utf-8").split("\n");
        const start = Math.max(0, lineNum - 3);
        const end = Math.min(codeLines.length, lineNum + 2);
        output += `\n${CYAN}${BOLD}[code] ${RESET}${YELLOW} ${UNDERLINE}${errorLocation}${RESET}\n`;
        for (let i = start; i < end; i++) {
          const codeLine = codeLines[i];
          const lineLabel = `${i + 1}`.padStart(4, " ");
          const pointer = i + 1 === lineNum ? `${RED}<-${RESET}` : " ";
          output += `${GRAY}${lineLabel}${RESET} ${pointer} ${codeLine}\n`;
        }
      } catch (readErr) {
        // Source not readable: fall back to printing the raw stack trace.
        output += `${YELLOW}\u26A0\uFE0F No se pudo leer el archivo de origen: ${filePath}${RESET}\n`;
        output += `\n${CYAN}${BOLD}Stack Trace:${RESET}\n${stackLines.slice(2).join("\n")}\n`;
      }
    }
  }
  output += help;
  console.error(output);
  process.exit(1);
}
|
|
368
|
+
|
|
369
|
+
// src/index.ts
|
|
370
|
+
var index_default = Schema;
|
|
371
|
+
// Annotate the CommonJS export names for ESM import in node:
|
|
372
|
+
0 && (module.exports = {
|
|
373
|
+
Schema
|
|
374
|
+
});
|
|
375
|
+
//# sourceMappingURL=index.cjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/index.ts","../src/lib/Schema.ts","../src/lib/FileUtils.ts"],"sourcesContent":["import { Schema } from './lib/Schema';\r\n\r\nexport default Schema;\r\nexport { Schema };","import fs from 'fs';\r\nimport { Engine, TableProcessor } from \"@dbcube/core\";\r\nimport path from 'path';\r\nimport FileUtils from './FileUtils';\r\n\r\n/**\r\n * Main class to handle MySQL database connections and queries.\r\n * Implements the Singleton pattern to ensure a single instance of the connection pool.\r\n */\r\nclass Schema {\r\n private name: string;\r\n private engine: any;\r\n\r\n constructor(name: string) {\r\n this.name = name; \r\n const engine = new Engine(name);\r\n this.engine = engine;\r\n }\r\n\r\n async createDatabase(): Promise<any> {\r\n const rootPath = path.resolve(process.cwd());\r\n const response = await this.engine.run('schema_engine',[\r\n '--action', 'create_database',\r\n '--path', rootPath,\r\n ]);\r\n if(response.status!=200){\r\n returnFormattedError(response.status, response.message);\r\n }\r\n return response.data;\r\n }\r\n\r\n async refreshTables(): Promise<any> {\r\n const cubesDir = path.join(process.cwd(), 'dbcube', 'cubes');\r\n \r\n // Verificar si la carpeta existe\r\n if (!fs.existsSync(cubesDir)) {\r\n throw new Error('❌ The cubes folder does not exist');\r\n }\r\n\r\n const cubeFiles = FileUtils.getCubeFilesRecursively('dbcube', 'table.cube');\r\n if (cubeFiles.length === 0) {\r\n throw new Error('❌ There are no cubes to execute');\r\n } else { \r\n for (const file of cubeFiles) {\r\n const filePath = path.isAbsolute(file) ? 
file : path.join(cubesDir, file);\r\n const stats = fs.statSync(filePath);\r\n if (stats.isFile()) {\r\n const dml = await this.engine.run('schema_engine',[\r\n '--action', 'parse_table',\r\n '--mode', 'refresh',\r\n '--schema-path', filePath,\r\n ]);\r\n if(dml.status!=200){\r\n returnFormattedError(dml.status, dml.message);\r\n }\r\n const parseJson = JSON.stringify(dml.data.actions).replace(/[\\r\\n\\t]/g, '').replace(/\\\\[rnt]/g, '').replace(/\\s{2,}/g, ' '); \r\n\r\n const queries = await this.engine.run('schema_engine',[\r\n '--action', 'generate',\r\n '--mode', 'refresh',\r\n '--dml', parseJson,\r\n ]);\r\n if(queries.status!=200){\r\n returnFormattedError(queries.status, queries.message);\r\n }\r\n delete queries.data.database_type;\r\n \r\n const parseJsonQueries = JSON.stringify(queries.data); \r\n\r\n const response = await this.engine.run('schema_engine',[\r\n '--action', 'execute',\r\n '--mode', 'refresh',\r\n '--dml', parseJsonQueries,\r\n ]);\r\n\r\n if(response.status!=200){\r\n returnFormattedError(response.status, response.message);\r\n }\r\n const createQuery = queries.data.regular_queries.filter((q:string) => q.includes(\"CREATE\"))[0];\r\n \r\n await TableProcessor.saveQuery(dml.data.table, dml.data.database, createQuery);\r\n\r\n return response.data;\r\n \r\n }\r\n }\r\n }\r\n return null;\r\n }\r\n\r\n async freshTables(): Promise<any> {\r\n const cubesDir = path.join(process.cwd(), 'dbcube', 'cubes');\r\n \r\n // Verificar si la carpeta existe\r\n if (!fs.existsSync(cubesDir)) {\r\n throw new Error('❌ The cubes folder does not exist');\r\n }\r\n\r\n const cubeFiles = FileUtils.getCubeFilesRecursively('dbcube', 'table.cube');\r\n if (cubeFiles.length === 0) {\r\n throw new Error('❌ There are no cubes to execute');\r\n } else { \r\n for (const file of cubeFiles) {\r\n const filePath = path.isAbsolute(file) ? 
file : path.join(cubesDir, file);\r\n const stats = fs.statSync(filePath);\r\n if (stats.isFile()) {\r\n const dml = await this.engine.run('schema_engine',[\r\n '--action', 'parse_table',\r\n '--schema-path', filePath,\r\n '--mode', 'fresh',\r\n ]);\r\n if(dml.status!=200){\r\n returnFormattedError(dml.status, dml.message);\r\n }\r\n const parseJson = JSON.stringify(dml.data.actions).replace(/[\\r\\n\\t]/g, '').replace(/\\\\[rnt]/g, '').replace(/\\s{2,}/g, ' '); \r\n\r\n const queries = await this.engine.run('schema_engine',[\r\n '--action', 'generate',\r\n '--dml', parseJson,\r\n ]);\r\n if(queries.status!=200){\r\n returnFormattedError(queries.status, queries.message);\r\n }\r\n delete queries.data. _type;\r\n \r\n const createQuery = queries.data.regular_queries.filter((q:string) => q.includes(\"CREATE\"))[0];\r\n \r\n if(queries.data.regular_queries.length>0){\r\n const nowQueries = await TableProcessor.generateAlterQueries(queries.data.regular_queries[0], dml.data.motor, dml.data.table, dml.data.database);\r\n queries.data.regular_queries = nowQueries;\r\n }\r\n\r\n const parseJsonQueries = JSON.stringify(queries.data); \r\n\r\n const response = await this.engine.run('schema_engine',[\r\n '--action', 'execute',\r\n '--mode', 'fresh',\r\n '--dml', parseJsonQueries,\r\n ]);\r\n\r\n if(response.status!=200){\r\n returnFormattedError(response.status, response.message);\r\n }\r\n \r\n await TableProcessor.saveQuery(dml.data.table, dml.data.database, createQuery);\r\n\r\n return response.data;\r\n \r\n }\r\n }\r\n }\r\n return null;\r\n }\r\n\r\n async executeSeeders(): Promise<any> {\r\n const cubesDir = path.join(process.cwd(), 'dbcube', 'cubes');\r\n \r\n // Verificar si la carpeta existe\r\n if (!fs.existsSync(cubesDir)) {\r\n throw new Error('❌ The cubes folder does not exist');\r\n }\r\n\r\n const cubeFiles = FileUtils.getCubeFilesRecursively('dbcube', 'seeder.cube');\r\n\r\n if (cubeFiles.length === 0) {\r\n throw new Error('❌ There are no cubes to 
execute');\r\n } else { \r\n for (const file of cubeFiles) {\r\n const filePath = path.isAbsolute(file) ? file : path.join(cubesDir, file);\r\n const stats = fs.statSync(filePath);\r\n \r\n if (stats.isFile()) {\r\n\r\n const response = await this.engine.run('schema_engine',[\r\n '--action', 'seeder',\r\n '--schema-path', filePath,\r\n ]);\r\n\r\n if(response.status!=200){\r\n returnFormattedError(response.status, response.message);\r\n }\r\n\r\n return response.data;\r\n \r\n }\r\n }\r\n }\r\n return null;\r\n }\r\n\r\n async executeTriggers(): Promise<any> {\r\n const cubesDir = path.join(process.cwd(), 'dbcube', 'cubes');\r\n const triggersDirExit = path.join(process.cwd(), 'dbcube', 'triggers');\r\n \r\n // Verificar si la carpeta existe\r\n if (!fs.existsSync(cubesDir)) {\r\n throw new Error('❌ The cubes folder does not exist');\r\n }\r\n\r\n const cubeFiles = FileUtils.getCubeFilesRecursively('dbcube', 'trigger.cube');\r\n\r\n if (cubeFiles.length === 0) {\r\n throw new Error('❌ There are no cubes to execute');\r\n } else { \r\n for (const file of cubeFiles) {\r\n const filePath = path.isAbsolute(file) ? file : path.join(cubesDir, file);\r\n const stats = fs.statSync(filePath);\r\n \r\n if (stats.isFile()) {\r\n\r\n const response = await this.engine.run('schema_engine',[\r\n '--action', 'trigger',\r\n '--path-exit', triggersDirExit,\r\n '--schema-path', filePath,\r\n ]);\r\n\r\n if(response.status!=200){\r\n returnFormattedError(response.status, response.message);\r\n }\r\n\r\n return response.data;\r\n \r\n }\r\n }\r\n }\r\n return null;\r\n }\r\n}\r\n\r\n\r\nfunction returnFormattedError(status: number, message: string) {\r\n const RESET = '\\x1b[0m';\r\n const RED = '\\x1b[31m';\r\n const YELLOW = '\\x1b[33m';\r\n const BOLD = '\\x1b[1m';\r\n const CYAN = '\\x1b[36m';\r\n const GRAY = '\\x1b[90m';\r\n const UNDERLINE = '\\x1b[4m';\r\n const MAGENTA = '\\x1b[35m';\r\n\r\n let output = '';\r\n let help = '';\r\n const color = status === 600 ? 
YELLOW : RED;\r\n\r\n \r\n if (message.includes(\"[help]\")) {\r\n const parts = message.split(\"[help]\");\r\n output += `\\n${RED}${BOLD}${parts[0]}${RESET}`;\r\n help += `\\n${MAGENTA}${BOLD}[help]${RESET} ${GRAY}${parts[1]}${RESET}\\n`;\r\n } else {\r\n output += `\\n${color}${BOLD}${message}${RESET}\\n`;\r\n }\r\n\r\n const err = new Error();\r\n const stackLines = err.stack?.split('\\n') || [];\r\n\r\n // Buscamos la primera línea del stack fuera de node_modules\r\n const relevantStackLine = stackLines.find(line => \r\n line.includes('.js:') && !line.includes('node_modules')\r\n );\r\n\r\n if (relevantStackLine) {\r\n const match = relevantStackLine.match(/\\((.*):(\\d+):(\\d+)\\)/) || \r\n relevantStackLine.match(/at (.*):(\\d+):(\\d+)/);\r\n\r\n if (match) {\r\n const [, filePath, lineStr, columnStr] = match;\r\n const lineNum = parseInt(lineStr, 10);\r\n const errorLocation = `${filePath}:${lineStr}:${columnStr}`;\r\n\r\n // Leemos el archivo y sacamos las líneas relevantes\r\n try {\r\n const codeLines = fs.readFileSync(filePath, 'utf-8').split('\\n');\r\n const start = Math.max(0, lineNum - 3);\r\n const end = Math.min(codeLines.length, lineNum + 2);\r\n\r\n output += `\\n${CYAN}${BOLD}[code] ${RESET}${YELLOW} ${UNDERLINE}${errorLocation}${RESET}\\n`;\r\n\r\n for (let i = start; i < end; i++) {\r\n const line = codeLines[i];\r\n const lineLabel = `${i + 1}`.padStart(4, ' ');\r\n const pointer = i + 1 === lineNum ? 
`${RED}<-${RESET}` : ' ';\r\n output += `${GRAY}${lineLabel}${RESET} ${pointer} ${line}\\n`;\r\n }\r\n } catch (err) {\r\n output += `${YELLOW}⚠️ No se pudo leer el archivo de origen: ${filePath}${RESET}\\n`;\r\n output += `\\n${CYAN}${BOLD}Stack Trace:${RESET}\\n${stackLines.slice(2).join('\\n')}\\n`;\r\n }\r\n }\r\n } \r\n output += help;\r\n console.error(output);\r\n process.exit(1);\r\n}\r\n\r\nexport default Schema;\r\nexport { Schema };\r\n","import * as fs from 'fs';\r\nimport * as path from 'path';\r\n\r\nclass FileUtils {\r\n /**\r\n * Verifica si un archivo existe (asincrónico).\r\n * @param filePath - Ruta del archivo.\r\n * @returns True si el archivo existe, false si no.\r\n */\r\n static async fileExists(filePath: string): Promise<boolean> {\r\n return new Promise((resolve) => {\r\n fs.access(path.resolve(filePath), fs.constants.F_OK, (err) => {\r\n resolve(!err);\r\n });\r\n });\r\n }\r\n\r\n /**\r\n * Verifica si un archivo existe (sincrónico).\r\n * @param filePath - Ruta del archivo.\r\n * @returns True si el archivo existe, false si no.\r\n */\r\n static fileExistsSync(filePath: string): boolean {\r\n try {\r\n fs.accessSync(path.resolve(filePath), fs.constants.F_OK);\r\n return true;\r\n } catch {\r\n return false;\r\n }\r\n }\r\n\r\n static extractDatabaseName(input: string): string | null {\r\n const match = input.match(/@database\\([\"']?([\\w-]+)[\"']?\\)/);\r\n return match ? 
match[1] : null;\r\n }\r\n\r\n /**\r\n * Lee recursivamente archivos que terminan en un sufijo dado y los ordena numéricamente.\r\n * @param dir - Directorio base (relativo o absoluto).\r\n * @param suffix - Sufijo de archivo (como 'table.cube').\r\n * @returns Rutas absolutas de los archivos encontrados y ordenados.\r\n */\r\n static getCubeFilesRecursively(dir: string, suffix: string): string[] {\r\n const baseDir = path.resolve(dir); // ✅ Asegura que sea absoluto\r\n const cubeFiles: string[] = [];\r\n\r\n function recurse(currentDir: string): void {\r\n const entries = fs.readdirSync(currentDir, { withFileTypes: true });\r\n \r\n for (const entry of entries) {\r\n const fullPath = path.join(currentDir, entry.name);\r\n \r\n if (entry.isDirectory()) {\r\n recurse(fullPath);\r\n } else if (entry.isFile() && entry.name.endsWith(suffix)) {\r\n cubeFiles.push(fullPath); // Ya es absoluta\r\n }\r\n }\r\n }\r\n\r\n recurse(baseDir);\r\n\r\n // Ordenar por número si los archivos comienzan con un número\r\n cubeFiles.sort((a, b) => {\r\n const aNum = parseInt(path.basename(a));\r\n const bNum = parseInt(path.basename(b));\r\n return (isNaN(aNum) ? 0 : aNum) - (isNaN(bNum) ? 
0 : bNum);\r\n });\r\n\r\n return cubeFiles;\r\n }\r\n}\r\n\r\nexport default FileUtils;"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,gBAAe;AACf,kBAAuC;AACvC,kBAAiB;;;ACFjB,SAAoB;AACpB,WAAsB;AAEtB,IAAM,YAAN,MAAgB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAMd,aAAa,WAAW,UAAoC;AAC1D,WAAO,IAAI,QAAQ,CAACA,aAAY;AAC9B,MAAG,UAAY,aAAQ,QAAQ,GAAM,aAAU,MAAM,CAAC,QAAQ;AAC5D,QAAAA,SAAQ,CAAC,GAAG;AAAA,MACd,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,OAAO,eAAe,UAA2B;AAC/C,QAAI;AACF,MAAG,cAAgB,aAAQ,QAAQ,GAAM,aAAU,IAAI;AACvD,aAAO;AAAA,IACT,QAAQ;AACN,aAAO;AAAA,IACT;AAAA,EACF;AAAA,EAEA,OAAO,oBAAoB,OAA8B;AACvD,UAAM,QAAQ,MAAM,MAAM,iCAAiC;AAC3D,WAAO,QAAQ,MAAM,CAAC,IAAI;AAAA,EAC5B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,OAAO,wBAAwB,KAAa,QAA0B;AACpE,UAAM,UAAe,aAAQ,GAAG;AAChC,UAAM,YAAsB,CAAC;AAE7B,aAAS,QAAQ,YAA0B;AACzC,YAAM,UAAa,eAAY,YAAY,EAAE,eAAe,KAAK,CAAC;AAElE,iBAAW,SAAS,SAAS;AAC3B,cAAM,WAAgB,UAAK,YAAY,MAAM,IAAI;AAEjD,YAAI,MAAM,YAAY,GAAG;AACvB,kBAAQ,QAAQ;AAAA,QAClB,WAAW,MAAM,OAAO,KAAK,MAAM,KAAK,SAAS,MAAM,GAAG;AACxD,oBAAU,KAAK,QAAQ;AAAA,QACzB;AAAA,MACF;AAAA,IACF;AAEA,YAAQ,OAAO;AAGf,cAAU,KAAK,CAAC,GAAG,MAAM;AACvB,YAAM,OAAO,SAAc,cAAS,CAAC,CAAC;AACtC,YAAM,OAAO,SAAc,cAAS,CAAC,CAAC;AACtC,cAAQ,MAAM,IAAI,IAAI,IAAI,SAAS,MAAM,IAAI,IAAI,IAAI;AAAA,IACvD,CAAC;AAED,WAAO;AAAA,EACT;AACF;AAEA,IAAO,oBAAQ;;;ADhEf,IAAM,SAAN,MAAa;AAAA,EACD;AAAA,EACA;AAAA,EAER,YAAY,MAAc;AACtB,SAAK,OAAO;AACZ,UAAM,SAAS,IAAI,mBAAO,IAAI;AAC9B,SAAK,SAAS;AAAA,EAClB;AAAA,EAEA,MAAM,iBAA+B;AACjC,UAAM,WAAW,YAAAC,QAAK,QAAQ,QAAQ,IAAI,CAAC;AAC3C,UAAM,WAAY,MAAM,KAAK,OAAO,IAAI,iBAAgB;AAAA,MACpD;AAAA,MAAY;AAAA,MACZ;AAAA,MAAU;AAAA,IACd,CAAC;AACD,QAAG,SAAS,UAAQ,KAAI;AACpB,2BAAqB,SAAS,QAAQ,SAAS,OAAO;AAAA,IAC1D;AACA,WAAO,SAAS;AAAA,EACpB;AAAA,EAEA,MAAM,gBAA8B;AAChC,UAAM,WAAW,YAAAA,QAAK,KAAK,QAAQ,IAAI,GAAG,UAAU,OAAO;AAG3D,QAAI,CAAC,UAAAC,QAAG,WAAW,QAAQ,GAAG;AAC1B,YAAM,IAAI,MAAM,wCAAmC;AAAA,IACvD;AAEA,UAAM,YAAY,kBAAU,wBAAwB,UAAU,YAAY;AAC1E,QAAI,UAAU,WAAW,GAAG;AACxB,YAAM,IAAI,MAAM,sCAAiC;AAAA,IACrD,OAAO;
AACH,iBAAW,QAAQ,WAAW;AAC1B,cAAM,WAAW,YAAAD,QAAK,WAAW,IAAI,IAAI,OAAO,YAAAA,QAAK,KAAK,UAAU,IAAI;AACxE,cAAM,QAAQ,UAAAC,QAAG,SAAS,QAAQ;AAClC,YAAI,MAAM,OAAO,GAAG;AAChB,gBAAM,MAAO,MAAM,KAAK,OAAO,IAAI,iBAAgB;AAAA,YAC/C;AAAA,YAAY;AAAA,YACZ;AAAA,YAAU;AAAA,YACV;AAAA,YAAiB;AAAA,UACrB,CAAC;AACD,cAAG,IAAI,UAAQ,KAAI;AACf,iCAAqB,IAAI,QAAQ,IAAI,OAAO;AAAA,UAChD;AACA,gBAAM,YAAY,KAAK,UAAU,IAAI,KAAK,OAAO,EAAE,QAAQ,aAAa,EAAE,EAAE,QAAQ,YAAY,EAAE,EAAE,QAAQ,WAAW,GAAG;AAE1H,gBAAM,UAAW,MAAM,KAAK,OAAO,IAAI,iBAAgB;AAAA,YACnD;AAAA,YAAY;AAAA,YACZ;AAAA,YAAU;AAAA,YACV;AAAA,YAAS;AAAA,UACb,CAAC;AACD,cAAG,QAAQ,UAAQ,KAAI;AACnB,iCAAqB,QAAQ,QAAQ,QAAQ,OAAO;AAAA,UACxD;AACA,iBAAO,QAAQ,KAAK;AAEpB,gBAAM,mBAAmB,KAAK,UAAU,QAAQ,IAAI;AAEpD,gBAAM,WAAY,MAAM,KAAK,OAAO,IAAI,iBAAgB;AAAA,YACpD;AAAA,YAAY;AAAA,YACZ;AAAA,YAAU;AAAA,YACV;AAAA,YAAS;AAAA,UACb,CAAC;AAED,cAAG,SAAS,UAAQ,KAAI;AACpB,iCAAqB,SAAS,QAAQ,SAAS,OAAO;AAAA,UAC1D;AACA,gBAAM,cAAc,QAAQ,KAAK,gBAAgB,OAAO,CAAC,MAAa,EAAE,SAAS,QAAQ,CAAC,EAAE,CAAC;AAE7F,gBAAM,2BAAe,UAAU,IAAI,KAAK,OAAO,IAAI,KAAK,UAAU,WAAW;AAE7E,iBAAO,SAAS;AAAA,QAEpB;AAAA,MACJ;AAAA,IACJ;AACA,WAAO;AAAA,EACX;AAAA,EAEA,MAAM,cAA4B;AAC9B,UAAM,WAAW,YAAAD,QAAK,KAAK,QAAQ,IAAI,GAAG,UAAU,OAAO;AAG3D,QAAI,CAAC,UAAAC,QAAG,WAAW,QAAQ,GAAG;AAC1B,YAAM,IAAI,MAAM,wCAAmC;AAAA,IACvD;AAEA,UAAM,YAAY,kBAAU,wBAAwB,UAAU,YAAY;AAC1E,QAAI,UAAU,WAAW,GAAG;AACxB,YAAM,IAAI,MAAM,sCAAiC;AAAA,IACrD,OAAO;AACH,iBAAW,QAAQ,WAAW;AAC1B,cAAM,WAAW,YAAAD,QAAK,WAAW,IAAI,IAAI,OAAO,YAAAA,QAAK,KAAK,UAAU,IAAI;AACxE,cAAM,QAAQ,UAAAC,QAAG,SAAS,QAAQ;AAClC,YAAI,MAAM,OAAO,GAAG;AAChB,gBAAM,MAAO,MAAM,KAAK,OAAO,IAAI,iBAAgB;AAAA,YAC/C;AAAA,YAAY;AAAA,YACZ;AAAA,YAAiB;AAAA,YACjB;AAAA,YAAU;AAAA,UACd,CAAC;AACD,cAAG,IAAI,UAAQ,KAAI;AACf,iCAAqB,IAAI,QAAQ,IAAI,OAAO;AAAA,UAChD;AACA,gBAAM,YAAY,KAAK,UAAU,IAAI,KAAK,OAAO,EAAE,QAAQ,aAAa,EAAE,EAAE,QAAQ,YAAY,EAAE,EAAE,QAAQ,WAAW,GAAG;AAE1H,gBAAM,UAAW,MAAM,KAAK,OAAO,IAAI,iBAAgB;AAAA,YACnD;AAAA,YAAY;AAAA,YACZ;AAAA,YAAS;AAAA,UACb,CAAC;AACD,cAAG,QAAQ,UAAQ,KAAI;AACnB,iCAAqB,QAAQ,QAAQ,QAAQ,OAAO;AAAA,UACxD;AACA,iBAAO,QAAQ,KAAM;AAErB
,gBAAM,cAAc,QAAQ,KAAK,gBAAgB,OAAO,CAAC,MAAa,EAAE,SAAS,QAAQ,CAAC,EAAE,CAAC;AAE7F,cAAG,QAAQ,KAAK,gBAAgB,SAAO,GAAE;AACrC,kBAAM,aAAa,MAAM,2BAAe,qBAAqB,QAAQ,KAAK,gBAAgB,CAAC,GAAG,IAAI,KAAK,OAAO,IAAI,KAAK,OAAO,IAAI,KAAK,QAAQ;AAC/I,oBAAQ,KAAK,kBAAkB;AAAA,UACnC;AAEA,gBAAM,mBAAmB,KAAK,UAAU,QAAQ,IAAI;AAEpD,gBAAM,WAAY,MAAM,KAAK,OAAO,IAAI,iBAAgB;AAAA,YACpD;AAAA,YAAY;AAAA,YACZ;AAAA,YAAU;AAAA,YACV;AAAA,YAAS;AAAA,UACb,CAAC;AAED,cAAG,SAAS,UAAQ,KAAI;AACpB,iCAAqB,SAAS,QAAQ,SAAS,OAAO;AAAA,UAC1D;AAEA,gBAAM,2BAAe,UAAU,IAAI,KAAK,OAAO,IAAI,KAAK,UAAU,WAAW;AAE7E,iBAAO,SAAS;AAAA,QAEpB;AAAA,MACJ;AAAA,IACJ;AACA,WAAO;AAAA,EACX;AAAA,EAEA,MAAM,iBAA+B;AACjC,UAAM,WAAW,YAAAD,QAAK,KAAK,QAAQ,IAAI,GAAG,UAAU,OAAO;AAG3D,QAAI,CAAC,UAAAC,QAAG,WAAW,QAAQ,GAAG;AAC1B,YAAM,IAAI,MAAM,wCAAmC;AAAA,IACvD;AAEA,UAAM,YAAY,kBAAU,wBAAwB,UAAU,aAAa;AAE3E,QAAI,UAAU,WAAW,GAAG;AACxB,YAAM,IAAI,MAAM,sCAAiC;AAAA,IACrD,OAAO;AACH,iBAAW,QAAQ,WAAW;AAC1B,cAAM,WAAW,YAAAD,QAAK,WAAW,IAAI,IAAI,OAAO,YAAAA,QAAK,KAAK,UAAU,IAAI;AACxE,cAAM,QAAQ,UAAAC,QAAG,SAAS,QAAQ;AAElC,YAAI,MAAM,OAAO,GAAG;AAEhB,gBAAM,WAAY,MAAM,KAAK,OAAO,IAAI,iBAAgB;AAAA,YACpD;AAAA,YAAY;AAAA,YACZ;AAAA,YAAiB;AAAA,UACrB,CAAC;AAED,cAAG,SAAS,UAAQ,KAAI;AACpB,iCAAqB,SAAS,QAAQ,SAAS,OAAO;AAAA,UAC1D;AAEA,iBAAO,SAAS;AAAA,QAEpB;AAAA,MACJ;AAAA,IACJ;AACA,WAAO;AAAA,EACX;AAAA,EAEA,MAAM,kBAAgC;AAClC,UAAM,WAAW,YAAAD,QAAK,KAAK,QAAQ,IAAI,GAAG,UAAU,OAAO;AAC3D,UAAM,kBAAkB,YAAAA,QAAK,KAAK,QAAQ,IAAI,GAAG,UAAU,UAAU;AAGrE,QAAI,CAAC,UAAAC,QAAG,WAAW,QAAQ,GAAG;AAC1B,YAAM,IAAI,MAAM,wCAAmC;AAAA,IACvD;AAEA,UAAM,YAAY,kBAAU,wBAAwB,UAAU,cAAc;AAE5E,QAAI,UAAU,WAAW,GAAG;AACxB,YAAM,IAAI,MAAM,sCAAiC;AAAA,IACrD,OAAO;AACH,iBAAW,QAAQ,WAAW;AAC1B,cAAM,WAAW,YAAAD,QAAK,WAAW,IAAI,IAAI,OAAO,YAAAA,QAAK,KAAK,UAAU,IAAI;AACxE,cAAM,QAAQ,UAAAC,QAAG,SAAS,QAAQ;AAElC,YAAI,MAAM,OAAO,GAAG;AAEhB,gBAAM,WAAY,MAAM,KAAK,OAAO,IAAI,iBAAgB;AAAA,YACpD;AAAA,YAAY;AAAA,YACZ;AAAA,YAAe;AAAA,YACf;AAAA,YAAiB;AAAA,UACrB,CAAC;AAED,cAAG,SAAS,UAAQ,KAAI;AACpB,iCAAqB,SAAS,QAAQ,SAAS,OAAO;AAAA,UAC1D;AAEA,iBAAO,SAAS;AAAA,QAEpB;AAAA,MACJ;AAAA,IACJ;AACA,WAAO
;AAAA,EACX;AACJ;AAGA,SAAS,qBAAqB,QAAgB,SAAiB;AAC3D,QAAM,QAAQ;AACd,QAAM,MAAM;AACZ,QAAM,SAAS;AACf,QAAM,OAAO;AACb,QAAM,OAAO;AACb,QAAM,OAAO;AACb,QAAM,YAAY;AAClB,QAAM,UAAU;AAEhB,MAAI,SAAS;AACb,MAAI,OAAO;AACX,QAAM,QAAQ,WAAW,MAAM,SAAS;AAGxC,MAAI,QAAQ,SAAS,QAAQ,GAAG;AAC5B,UAAM,QAAQ,QAAQ,MAAM,QAAQ;AACpC,cAAU;AAAA,EAAK,GAAG,GAAG,IAAI,GAAG,MAAM,CAAC,CAAC,GAAG,KAAK;AAC5C,YAAQ;AAAA,EAAK,OAAO,GAAG,IAAI,SAAS,KAAK,IAAI,IAAI,GAAG,MAAM,CAAC,CAAC,GAAG,KAAK;AAAA;AAAA,EACxE,OAAO;AACH,cAAU;AAAA,EAAK,KAAK,GAAG,IAAI,GAAG,OAAO,GAAG,KAAK;AAAA;AAAA,EACjD;AAEA,QAAM,MAAM,IAAI,MAAM;AACtB,QAAM,aAAa,IAAI,OAAO,MAAM,IAAI,KAAK,CAAC;AAG9C,QAAM,oBAAoB,WAAW;AAAA,IAAK,UACtC,KAAK,SAAS,MAAM,KAAK,CAAC,KAAK,SAAS,cAAc;AAAA,EAC1D;AAEA,MAAI,mBAAmB;AACnB,UAAM,QAAQ,kBAAkB,MAAM,sBAAsB,KAC9C,kBAAkB,MAAM,qBAAqB;AAE3D,QAAI,OAAO;AACP,YAAM,CAAC,EAAE,UAAU,SAAS,SAAS,IAAI;AACzC,YAAM,UAAU,SAAS,SAAS,EAAE;AACpC,YAAM,gBAAgB,GAAG,QAAQ,IAAI,OAAO,IAAI,SAAS;AAGzD,UAAI;AACA,cAAM,YAAY,UAAAA,QAAG,aAAa,UAAU,OAAO,EAAE,MAAM,IAAI;AAC/D,cAAM,QAAQ,KAAK,IAAI,GAAG,UAAU,CAAC;AACrC,cAAM,MAAM,KAAK,IAAI,UAAU,QAAQ,UAAU,CAAC;AAElD,kBAAU;AAAA,EAAK,IAAI,GAAG,IAAI,UAAU,KAAK,GAAG,MAAM,IAAI,SAAS,GAAG,aAAa,GAAG,KAAK;AAAA;AAEvF,iBAAS,IAAI,OAAO,IAAI,KAAK,KAAK;AAC9B,gBAAM,OAAO,UAAU,CAAC;AACxB,gBAAM,YAAY,GAAG,IAAI,CAAC,GAAG,SAAS,GAAG,GAAG;AAC5C,gBAAM,UAAU,IAAI,MAAM,UAAU,GAAG,GAAG,KAAK,KAAK,KAAK;AACzD,oBAAU,GAAG,IAAI,GAAG,SAAS,GAAG,KAAK,IAAI,OAAO,IAAI,IAAI;AAAA;AAAA,QAC5D;AAAA,MACJ,SAASC,MAAK;AACV,kBAAU,GAAG,MAAM,sDAA4C,QAAQ,GAAG,KAAK;AAAA;AAC/E,kBAAU;AAAA,EAAK,IAAI,GAAG,IAAI,eAAe,KAAK;AAAA,EAAK,WAAW,MAAM,CAAC,EAAE,KAAK,IAAI,CAAC;AAAA;AAAA,MACrF;AAAA,IACJ;AAAA,EACJ;AACA,YAAU;AACV,UAAQ,MAAM,MAAM;AACpB,UAAQ,KAAK,CAAC;AAClB;;;ADnSA,IAAO,gBAAQ;","names":["resolve","path","fs","err"]}
|
package/dist/index.d.mts
ADDED
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Main class to handle MySQL database connections and queries.
|
|
3
|
+
* Implements the Singleton pattern to ensure a single instance of the connection pool.
|
|
4
|
+
*/
|
|
5
|
+
declare class Schema {
|
|
6
|
+
private name;
|
|
7
|
+
private engine;
|
|
8
|
+
constructor(name: string);
|
|
9
|
+
createDatabase(): Promise<any>;
|
|
10
|
+
refreshTables(): Promise<any>;
|
|
11
|
+
freshTables(): Promise<any>;
|
|
12
|
+
executeSeeders(): Promise<any>;
|
|
13
|
+
executeTriggers(): Promise<any>;
|
|
14
|
+
}
|
|
15
|
+
|
|
16
|
+
export { Schema, Schema as default };
|
package/dist/index.d.ts
ADDED
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Main class to handle MySQL database connections and queries.
|
|
3
|
+
* Implements the Singleton pattern to ensure a single instance of the connection pool.
|
|
4
|
+
*/
|
|
5
|
+
declare class Schema {
|
|
6
|
+
private name;
|
|
7
|
+
private engine;
|
|
8
|
+
constructor(name: string);
|
|
9
|
+
createDatabase(): Promise<any>;
|
|
10
|
+
refreshTables(): Promise<any>;
|
|
11
|
+
freshTables(): Promise<any>;
|
|
12
|
+
executeSeeders(): Promise<any>;
|
|
13
|
+
executeTriggers(): Promise<any>;
|
|
14
|
+
}
|
|
15
|
+
|
|
16
|
+
export { Schema, Schema as default };
|
package/dist/index.js
ADDED
|
@@ -0,0 +1,338 @@
|
|
|
1
|
+
// src/lib/Schema.ts
|
|
2
|
+
import fs2 from "fs";
|
|
3
|
+
import { Engine, TableProcessor } from "@dbcube/core";
|
|
4
|
+
import path2 from "path";
|
|
5
|
+
|
|
6
|
+
// src/lib/FileUtils.ts
|
|
7
|
+
import * as fs from "fs";
|
|
8
|
+
import * as path from "path";
|
|
9
|
+
var FileUtils = class {
|
|
10
|
+
/**
|
|
11
|
+
* Verifica si un archivo existe (asincrónico).
|
|
12
|
+
* @param filePath - Ruta del archivo.
|
|
13
|
+
* @returns True si el archivo existe, false si no.
|
|
14
|
+
*/
|
|
15
|
+
static async fileExists(filePath) {
|
|
16
|
+
return new Promise((resolve2) => {
|
|
17
|
+
fs.access(path.resolve(filePath), fs.constants.F_OK, (err) => {
|
|
18
|
+
resolve2(!err);
|
|
19
|
+
});
|
|
20
|
+
});
|
|
21
|
+
}
|
|
22
|
+
/**
|
|
23
|
+
* Verifica si un archivo existe (sincrónico).
|
|
24
|
+
* @param filePath - Ruta del archivo.
|
|
25
|
+
* @returns True si el archivo existe, false si no.
|
|
26
|
+
*/
|
|
27
|
+
static fileExistsSync(filePath) {
|
|
28
|
+
try {
|
|
29
|
+
fs.accessSync(path.resolve(filePath), fs.constants.F_OK);
|
|
30
|
+
return true;
|
|
31
|
+
} catch {
|
|
32
|
+
return false;
|
|
33
|
+
}
|
|
34
|
+
}
|
|
35
|
+
static extractDatabaseName(input) {
|
|
36
|
+
const match = input.match(/@database\(["']?([\w-]+)["']?\)/);
|
|
37
|
+
return match ? match[1] : null;
|
|
38
|
+
}
|
|
39
|
+
/**
|
|
40
|
+
* Lee recursivamente archivos que terminan en un sufijo dado y los ordena numéricamente.
|
|
41
|
+
* @param dir - Directorio base (relativo o absoluto).
|
|
42
|
+
* @param suffix - Sufijo de archivo (como 'table.cube').
|
|
43
|
+
* @returns Rutas absolutas de los archivos encontrados y ordenados.
|
|
44
|
+
*/
|
|
45
|
+
static getCubeFilesRecursively(dir, suffix) {
|
|
46
|
+
const baseDir = path.resolve(dir);
|
|
47
|
+
const cubeFiles = [];
|
|
48
|
+
function recurse(currentDir) {
|
|
49
|
+
const entries = fs.readdirSync(currentDir, { withFileTypes: true });
|
|
50
|
+
for (const entry of entries) {
|
|
51
|
+
const fullPath = path.join(currentDir, entry.name);
|
|
52
|
+
if (entry.isDirectory()) {
|
|
53
|
+
recurse(fullPath);
|
|
54
|
+
} else if (entry.isFile() && entry.name.endsWith(suffix)) {
|
|
55
|
+
cubeFiles.push(fullPath);
|
|
56
|
+
}
|
|
57
|
+
}
|
|
58
|
+
}
|
|
59
|
+
recurse(baseDir);
|
|
60
|
+
cubeFiles.sort((a, b) => {
|
|
61
|
+
const aNum = parseInt(path.basename(a));
|
|
62
|
+
const bNum = parseInt(path.basename(b));
|
|
63
|
+
return (isNaN(aNum) ? 0 : aNum) - (isNaN(bNum) ? 0 : bNum);
|
|
64
|
+
});
|
|
65
|
+
return cubeFiles;
|
|
66
|
+
}
|
|
67
|
+
};
|
|
68
|
+
var FileUtils_default = FileUtils;
|
|
69
|
+
|
|
70
|
+
// src/lib/Schema.ts
|
|
71
|
+
var Schema = class {
|
|
72
|
+
name;
|
|
73
|
+
engine;
|
|
74
|
+
constructor(name) {
|
|
75
|
+
this.name = name;
|
|
76
|
+
const engine = new Engine(name);
|
|
77
|
+
this.engine = engine;
|
|
78
|
+
}
|
|
79
|
+
async createDatabase() {
|
|
80
|
+
const rootPath = path2.resolve(process.cwd());
|
|
81
|
+
const response = await this.engine.run("schema_engine", [
|
|
82
|
+
"--action",
|
|
83
|
+
"create_database",
|
|
84
|
+
"--path",
|
|
85
|
+
rootPath
|
|
86
|
+
]);
|
|
87
|
+
if (response.status != 200) {
|
|
88
|
+
returnFormattedError(response.status, response.message);
|
|
89
|
+
}
|
|
90
|
+
return response.data;
|
|
91
|
+
}
|
|
92
|
+
async refreshTables() {
|
|
93
|
+
const cubesDir = path2.join(process.cwd(), "dbcube", "cubes");
|
|
94
|
+
if (!fs2.existsSync(cubesDir)) {
|
|
95
|
+
throw new Error("\u274C The cubes folder does not exist");
|
|
96
|
+
}
|
|
97
|
+
const cubeFiles = FileUtils_default.getCubeFilesRecursively("dbcube", "table.cube");
|
|
98
|
+
if (cubeFiles.length === 0) {
|
|
99
|
+
throw new Error("\u274C There are no cubes to execute");
|
|
100
|
+
} else {
|
|
101
|
+
for (const file of cubeFiles) {
|
|
102
|
+
const filePath = path2.isAbsolute(file) ? file : path2.join(cubesDir, file);
|
|
103
|
+
const stats = fs2.statSync(filePath);
|
|
104
|
+
if (stats.isFile()) {
|
|
105
|
+
const dml = await this.engine.run("schema_engine", [
|
|
106
|
+
"--action",
|
|
107
|
+
"parse_table",
|
|
108
|
+
"--mode",
|
|
109
|
+
"refresh",
|
|
110
|
+
"--schema-path",
|
|
111
|
+
filePath
|
|
112
|
+
]);
|
|
113
|
+
if (dml.status != 200) {
|
|
114
|
+
returnFormattedError(dml.status, dml.message);
|
|
115
|
+
}
|
|
116
|
+
const parseJson = JSON.stringify(dml.data.actions).replace(/[\r\n\t]/g, "").replace(/\\[rnt]/g, "").replace(/\s{2,}/g, " ");
|
|
117
|
+
const queries = await this.engine.run("schema_engine", [
|
|
118
|
+
"--action",
|
|
119
|
+
"generate",
|
|
120
|
+
"--mode",
|
|
121
|
+
"refresh",
|
|
122
|
+
"--dml",
|
|
123
|
+
parseJson
|
|
124
|
+
]);
|
|
125
|
+
if (queries.status != 200) {
|
|
126
|
+
returnFormattedError(queries.status, queries.message);
|
|
127
|
+
}
|
|
128
|
+
delete queries.data.database_type;
|
|
129
|
+
const parseJsonQueries = JSON.stringify(queries.data);
|
|
130
|
+
const response = await this.engine.run("schema_engine", [
|
|
131
|
+
"--action",
|
|
132
|
+
"execute",
|
|
133
|
+
"--mode",
|
|
134
|
+
"refresh",
|
|
135
|
+
"--dml",
|
|
136
|
+
parseJsonQueries
|
|
137
|
+
]);
|
|
138
|
+
if (response.status != 200) {
|
|
139
|
+
returnFormattedError(response.status, response.message);
|
|
140
|
+
}
|
|
141
|
+
const createQuery = queries.data.regular_queries.filter((q) => q.includes("CREATE"))[0];
|
|
142
|
+
await TableProcessor.saveQuery(dml.data.table, dml.data.database, createQuery);
|
|
143
|
+
return response.data;
|
|
144
|
+
}
|
|
145
|
+
}
|
|
146
|
+
}
|
|
147
|
+
return null;
|
|
148
|
+
}
|
|
149
|
+
async freshTables() {
|
|
150
|
+
const cubesDir = path2.join(process.cwd(), "dbcube", "cubes");
|
|
151
|
+
if (!fs2.existsSync(cubesDir)) {
|
|
152
|
+
throw new Error("\u274C The cubes folder does not exist");
|
|
153
|
+
}
|
|
154
|
+
const cubeFiles = FileUtils_default.getCubeFilesRecursively("dbcube", "table.cube");
|
|
155
|
+
if (cubeFiles.length === 0) {
|
|
156
|
+
throw new Error("\u274C There are no cubes to execute");
|
|
157
|
+
} else {
|
|
158
|
+
for (const file of cubeFiles) {
|
|
159
|
+
const filePath = path2.isAbsolute(file) ? file : path2.join(cubesDir, file);
|
|
160
|
+
const stats = fs2.statSync(filePath);
|
|
161
|
+
if (stats.isFile()) {
|
|
162
|
+
const dml = await this.engine.run("schema_engine", [
|
|
163
|
+
"--action",
|
|
164
|
+
"parse_table",
|
|
165
|
+
"--schema-path",
|
|
166
|
+
filePath,
|
|
167
|
+
"--mode",
|
|
168
|
+
"fresh"
|
|
169
|
+
]);
|
|
170
|
+
if (dml.status != 200) {
|
|
171
|
+
returnFormattedError(dml.status, dml.message);
|
|
172
|
+
}
|
|
173
|
+
const parseJson = JSON.stringify(dml.data.actions).replace(/[\r\n\t]/g, "").replace(/\\[rnt]/g, "").replace(/\s{2,}/g, " ");
|
|
174
|
+
const queries = await this.engine.run("schema_engine", [
|
|
175
|
+
"--action",
|
|
176
|
+
"generate",
|
|
177
|
+
"--dml",
|
|
178
|
+
parseJson
|
|
179
|
+
]);
|
|
180
|
+
if (queries.status != 200) {
|
|
181
|
+
returnFormattedError(queries.status, queries.message);
|
|
182
|
+
}
|
|
183
|
+
delete queries.data._type;
|
|
184
|
+
const createQuery = queries.data.regular_queries.filter((q) => q.includes("CREATE"))[0];
|
|
185
|
+
if (queries.data.regular_queries.length > 0) {
|
|
186
|
+
const nowQueries = await TableProcessor.generateAlterQueries(queries.data.regular_queries[0], dml.data.motor, dml.data.table, dml.data.database);
|
|
187
|
+
queries.data.regular_queries = nowQueries;
|
|
188
|
+
}
|
|
189
|
+
const parseJsonQueries = JSON.stringify(queries.data);
|
|
190
|
+
const response = await this.engine.run("schema_engine", [
|
|
191
|
+
"--action",
|
|
192
|
+
"execute",
|
|
193
|
+
"--mode",
|
|
194
|
+
"fresh",
|
|
195
|
+
"--dml",
|
|
196
|
+
parseJsonQueries
|
|
197
|
+
]);
|
|
198
|
+
if (response.status != 200) {
|
|
199
|
+
returnFormattedError(response.status, response.message);
|
|
200
|
+
}
|
|
201
|
+
await TableProcessor.saveQuery(dml.data.table, dml.data.database, createQuery);
|
|
202
|
+
return response.data;
|
|
203
|
+
}
|
|
204
|
+
}
|
|
205
|
+
}
|
|
206
|
+
return null;
|
|
207
|
+
}
|
|
208
|
+
async executeSeeders() {
|
|
209
|
+
const cubesDir = path2.join(process.cwd(), "dbcube", "cubes");
|
|
210
|
+
if (!fs2.existsSync(cubesDir)) {
|
|
211
|
+
throw new Error("\u274C The cubes folder does not exist");
|
|
212
|
+
}
|
|
213
|
+
const cubeFiles = FileUtils_default.getCubeFilesRecursively("dbcube", "seeder.cube");
|
|
214
|
+
if (cubeFiles.length === 0) {
|
|
215
|
+
throw new Error("\u274C There are no cubes to execute");
|
|
216
|
+
} else {
|
|
217
|
+
for (const file of cubeFiles) {
|
|
218
|
+
const filePath = path2.isAbsolute(file) ? file : path2.join(cubesDir, file);
|
|
219
|
+
const stats = fs2.statSync(filePath);
|
|
220
|
+
if (stats.isFile()) {
|
|
221
|
+
const response = await this.engine.run("schema_engine", [
|
|
222
|
+
"--action",
|
|
223
|
+
"seeder",
|
|
224
|
+
"--schema-path",
|
|
225
|
+
filePath
|
|
226
|
+
]);
|
|
227
|
+
if (response.status != 200) {
|
|
228
|
+
returnFormattedError(response.status, response.message);
|
|
229
|
+
}
|
|
230
|
+
return response.data;
|
|
231
|
+
}
|
|
232
|
+
}
|
|
233
|
+
}
|
|
234
|
+
return null;
|
|
235
|
+
}
|
|
236
|
+
async executeTriggers() {
|
|
237
|
+
const cubesDir = path2.join(process.cwd(), "dbcube", "cubes");
|
|
238
|
+
const triggersDirExit = path2.join(process.cwd(), "dbcube", "triggers");
|
|
239
|
+
if (!fs2.existsSync(cubesDir)) {
|
|
240
|
+
throw new Error("\u274C The cubes folder does not exist");
|
|
241
|
+
}
|
|
242
|
+
const cubeFiles = FileUtils_default.getCubeFilesRecursively("dbcube", "trigger.cube");
|
|
243
|
+
if (cubeFiles.length === 0) {
|
|
244
|
+
throw new Error("\u274C There are no cubes to execute");
|
|
245
|
+
} else {
|
|
246
|
+
for (const file of cubeFiles) {
|
|
247
|
+
const filePath = path2.isAbsolute(file) ? file : path2.join(cubesDir, file);
|
|
248
|
+
const stats = fs2.statSync(filePath);
|
|
249
|
+
if (stats.isFile()) {
|
|
250
|
+
const response = await this.engine.run("schema_engine", [
|
|
251
|
+
"--action",
|
|
252
|
+
"trigger",
|
|
253
|
+
"--path-exit",
|
|
254
|
+
triggersDirExit,
|
|
255
|
+
"--schema-path",
|
|
256
|
+
filePath
|
|
257
|
+
]);
|
|
258
|
+
if (response.status != 200) {
|
|
259
|
+
returnFormattedError(response.status, response.message);
|
|
260
|
+
}
|
|
261
|
+
return response.data;
|
|
262
|
+
}
|
|
263
|
+
}
|
|
264
|
+
}
|
|
265
|
+
return null;
|
|
266
|
+
}
|
|
267
|
+
};
|
|
268
|
+
// Prints a colorized, formatted error report to stderr and terminates the
// process with exit code 1 (this function never returns to the caller).
// status 600 renders the message as a warning (yellow); any other status is
// treated as an error (red). If the message contains a "[help]" marker, the
// text after the marker is rendered as a separate magenta help section that
// is appended at the very end of the output.
function returnFormattedError(status, message) {
  // ANSI escape codes used to colorize terminal output.
  const RESET = "\x1B[0m";
  const RED = "\x1B[31m";
  const YELLOW = "\x1B[33m";
  const BOLD = "\x1B[1m";
  const CYAN = "\x1B[36m";
  const GRAY = "\x1B[90m";
  const UNDERLINE = "\x1B[4m";
  const MAGENTA = "\x1B[35m";
  let output = "";
  let help = "";
  // 600 is the "warning" status; everything else is a hard error.
  const color = status === 600 ? YELLOW : RED;
  if (message.includes("[help]")) {
    // parts[0] = the error text, parts[1] = the help text after the marker.
    const parts = message.split("[help]");
    output += `
${RED}${BOLD}${parts[0]}${RESET}`;
    help += `
${MAGENTA}${BOLD}[help]${RESET} ${GRAY}${parts[1]}${RESET}
`;
  } else {
    output += `
${color}${BOLD}${message}${RESET}
`;
  }
  // Capture a fresh stack trace here to locate the calling code's position.
  const err = new Error();
  const stackLines = err.stack?.split("\n") || [];
  // First frame that points at user code: a .js location outside node_modules.
  const relevantStackLine = stackLines.find(
    (line) => line.includes(".js:") && !line.includes("node_modules")
  );
  if (relevantStackLine) {
    // V8 frames come in two shapes: "at fn (file:line:col)" and "at file:line:col".
    const match = relevantStackLine.match(/\((.*):(\d+):(\d+)\)/) || relevantStackLine.match(/at (.*):(\d+):(\d+)/);
    if (match) {
      const [, filePath, lineStr, columnStr] = match;
      const lineNum = parseInt(lineStr, 10);
      const errorLocation = `${filePath}:${lineStr}:${columnStr}`;
      try {
        // Read the offending source file and show a few lines of context
        // (2 lines above through 2 lines below the reported line).
        const codeLines = fs2.readFileSync(filePath, "utf-8").split("\n");
        const start = Math.max(0, lineNum - 3);
        const end = Math.min(codeLines.length, lineNum + 2);
        output += `
${CYAN}${BOLD}[code] ${RESET}${YELLOW} ${UNDERLINE}${errorLocation}${RESET}
`;
        for (let i = start; i < end; i++) {
          const line = codeLines[i];
          // Right-aligned 1-based line number gutter.
          const lineLabel = `${i + 1}`.padStart(4, " ");
          // Arrow marker on the exact line the stack trace points at.
          const pointer = i + 1 === lineNum ? `${RED}<-${RESET}` : "  ";
          output += `${GRAY}${lineLabel}${RESET} ${pointer} ${line}
`;
        }
      } catch (err2) {
        // Source file could not be read; fall back to printing the raw stack trace.
        output += `${YELLOW}\u26A0\uFE0F No se pudo leer el archivo de origen: ${filePath}${RESET}
`;
        output += `
${CYAN}${BOLD}Stack Trace:${RESET}
${stackLines.slice(2).join("\n")}
`;
      }
    }
  }
  output += help;
  console.error(output);
  // Fatal by design: abort the whole process after reporting.
  process.exit(1);
}
|
|
331
|
+
|
|
332
|
+
// src/index.ts
|
|
333
|
+
var index_default = Schema;
|
|
334
|
+
export {
|
|
335
|
+
Schema,
|
|
336
|
+
index_default as default
|
|
337
|
+
};
|
|
338
|
+
//# sourceMappingURL=index.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/lib/Schema.ts","../src/lib/FileUtils.ts","../src/index.ts"],"sourcesContent":["import fs from 'fs';\r\nimport { Engine, TableProcessor } from \"@dbcube/core\";\r\nimport path from 'path';\r\nimport FileUtils from './FileUtils';\r\n\r\n/**\r\n * Main class to handle MySQL database connections and queries.\r\n * Implements the Singleton pattern to ensure a single instance of the connection pool.\r\n */\r\nclass Schema {\r\n private name: string;\r\n private engine: any;\r\n\r\n constructor(name: string) {\r\n this.name = name; \r\n const engine = new Engine(name);\r\n this.engine = engine;\r\n }\r\n\r\n async createDatabase(): Promise<any> {\r\n const rootPath = path.resolve(process.cwd());\r\n const response = await this.engine.run('schema_engine',[\r\n '--action', 'create_database',\r\n '--path', rootPath,\r\n ]);\r\n if(response.status!=200){\r\n returnFormattedError(response.status, response.message);\r\n }\r\n return response.data;\r\n }\r\n\r\n async refreshTables(): Promise<any> {\r\n const cubesDir = path.join(process.cwd(), 'dbcube', 'cubes');\r\n \r\n // Verificar si la carpeta existe\r\n if (!fs.existsSync(cubesDir)) {\r\n throw new Error('❌ The cubes folder does not exist');\r\n }\r\n\r\n const cubeFiles = FileUtils.getCubeFilesRecursively('dbcube', 'table.cube');\r\n if (cubeFiles.length === 0) {\r\n throw new Error('❌ There are no cubes to execute');\r\n } else { \r\n for (const file of cubeFiles) {\r\n const filePath = path.isAbsolute(file) ? 
file : path.join(cubesDir, file);\r\n const stats = fs.statSync(filePath);\r\n if (stats.isFile()) {\r\n const dml = await this.engine.run('schema_engine',[\r\n '--action', 'parse_table',\r\n '--mode', 'refresh',\r\n '--schema-path', filePath,\r\n ]);\r\n if(dml.status!=200){\r\n returnFormattedError(dml.status, dml.message);\r\n }\r\n const parseJson = JSON.stringify(dml.data.actions).replace(/[\\r\\n\\t]/g, '').replace(/\\\\[rnt]/g, '').replace(/\\s{2,}/g, ' '); \r\n\r\n const queries = await this.engine.run('schema_engine',[\r\n '--action', 'generate',\r\n '--mode', 'refresh',\r\n '--dml', parseJson,\r\n ]);\r\n if(queries.status!=200){\r\n returnFormattedError(queries.status, queries.message);\r\n }\r\n delete queries.data.database_type;\r\n \r\n const parseJsonQueries = JSON.stringify(queries.data); \r\n\r\n const response = await this.engine.run('schema_engine',[\r\n '--action', 'execute',\r\n '--mode', 'refresh',\r\n '--dml', parseJsonQueries,\r\n ]);\r\n\r\n if(response.status!=200){\r\n returnFormattedError(response.status, response.message);\r\n }\r\n const createQuery = queries.data.regular_queries.filter((q:string) => q.includes(\"CREATE\"))[0];\r\n \r\n await TableProcessor.saveQuery(dml.data.table, dml.data.database, createQuery);\r\n\r\n return response.data;\r\n \r\n }\r\n }\r\n }\r\n return null;\r\n }\r\n\r\n async freshTables(): Promise<any> {\r\n const cubesDir = path.join(process.cwd(), 'dbcube', 'cubes');\r\n \r\n // Verificar si la carpeta existe\r\n if (!fs.existsSync(cubesDir)) {\r\n throw new Error('❌ The cubes folder does not exist');\r\n }\r\n\r\n const cubeFiles = FileUtils.getCubeFilesRecursively('dbcube', 'table.cube');\r\n if (cubeFiles.length === 0) {\r\n throw new Error('❌ There are no cubes to execute');\r\n } else { \r\n for (const file of cubeFiles) {\r\n const filePath = path.isAbsolute(file) ? 
file : path.join(cubesDir, file);\r\n const stats = fs.statSync(filePath);\r\n if (stats.isFile()) {\r\n const dml = await this.engine.run('schema_engine',[\r\n '--action', 'parse_table',\r\n '--schema-path', filePath,\r\n '--mode', 'fresh',\r\n ]);\r\n if(dml.status!=200){\r\n returnFormattedError(dml.status, dml.message);\r\n }\r\n const parseJson = JSON.stringify(dml.data.actions).replace(/[\\r\\n\\t]/g, '').replace(/\\\\[rnt]/g, '').replace(/\\s{2,}/g, ' '); \r\n\r\n const queries = await this.engine.run('schema_engine',[\r\n '--action', 'generate',\r\n '--dml', parseJson,\r\n ]);\r\n if(queries.status!=200){\r\n returnFormattedError(queries.status, queries.message);\r\n }\r\n delete queries.data. _type;\r\n \r\n const createQuery = queries.data.regular_queries.filter((q:string) => q.includes(\"CREATE\"))[0];\r\n \r\n if(queries.data.regular_queries.length>0){\r\n const nowQueries = await TableProcessor.generateAlterQueries(queries.data.regular_queries[0], dml.data.motor, dml.data.table, dml.data.database);\r\n queries.data.regular_queries = nowQueries;\r\n }\r\n\r\n const parseJsonQueries = JSON.stringify(queries.data); \r\n\r\n const response = await this.engine.run('schema_engine',[\r\n '--action', 'execute',\r\n '--mode', 'fresh',\r\n '--dml', parseJsonQueries,\r\n ]);\r\n\r\n if(response.status!=200){\r\n returnFormattedError(response.status, response.message);\r\n }\r\n \r\n await TableProcessor.saveQuery(dml.data.table, dml.data.database, createQuery);\r\n\r\n return response.data;\r\n \r\n }\r\n }\r\n }\r\n return null;\r\n }\r\n\r\n async executeSeeders(): Promise<any> {\r\n const cubesDir = path.join(process.cwd(), 'dbcube', 'cubes');\r\n \r\n // Verificar si la carpeta existe\r\n if (!fs.existsSync(cubesDir)) {\r\n throw new Error('❌ The cubes folder does not exist');\r\n }\r\n\r\n const cubeFiles = FileUtils.getCubeFilesRecursively('dbcube', 'seeder.cube');\r\n\r\n if (cubeFiles.length === 0) {\r\n throw new Error('❌ There are no cubes to 
execute');\r\n } else { \r\n for (const file of cubeFiles) {\r\n const filePath = path.isAbsolute(file) ? file : path.join(cubesDir, file);\r\n const stats = fs.statSync(filePath);\r\n \r\n if (stats.isFile()) {\r\n\r\n const response = await this.engine.run('schema_engine',[\r\n '--action', 'seeder',\r\n '--schema-path', filePath,\r\n ]);\r\n\r\n if(response.status!=200){\r\n returnFormattedError(response.status, response.message);\r\n }\r\n\r\n return response.data;\r\n \r\n }\r\n }\r\n }\r\n return null;\r\n }\r\n\r\n async executeTriggers(): Promise<any> {\r\n const cubesDir = path.join(process.cwd(), 'dbcube', 'cubes');\r\n const triggersDirExit = path.join(process.cwd(), 'dbcube', 'triggers');\r\n \r\n // Verificar si la carpeta existe\r\n if (!fs.existsSync(cubesDir)) {\r\n throw new Error('❌ The cubes folder does not exist');\r\n }\r\n\r\n const cubeFiles = FileUtils.getCubeFilesRecursively('dbcube', 'trigger.cube');\r\n\r\n if (cubeFiles.length === 0) {\r\n throw new Error('❌ There are no cubes to execute');\r\n } else { \r\n for (const file of cubeFiles) {\r\n const filePath = path.isAbsolute(file) ? file : path.join(cubesDir, file);\r\n const stats = fs.statSync(filePath);\r\n \r\n if (stats.isFile()) {\r\n\r\n const response = await this.engine.run('schema_engine',[\r\n '--action', 'trigger',\r\n '--path-exit', triggersDirExit,\r\n '--schema-path', filePath,\r\n ]);\r\n\r\n if(response.status!=200){\r\n returnFormattedError(response.status, response.message);\r\n }\r\n\r\n return response.data;\r\n \r\n }\r\n }\r\n }\r\n return null;\r\n }\r\n}\r\n\r\n\r\nfunction returnFormattedError(status: number, message: string) {\r\n const RESET = '\\x1b[0m';\r\n const RED = '\\x1b[31m';\r\n const YELLOW = '\\x1b[33m';\r\n const BOLD = '\\x1b[1m';\r\n const CYAN = '\\x1b[36m';\r\n const GRAY = '\\x1b[90m';\r\n const UNDERLINE = '\\x1b[4m';\r\n const MAGENTA = '\\x1b[35m';\r\n\r\n let output = '';\r\n let help = '';\r\n const color = status === 600 ? 
YELLOW : RED;\r\n\r\n \r\n if (message.includes(\"[help]\")) {\r\n const parts = message.split(\"[help]\");\r\n output += `\\n${RED}${BOLD}${parts[0]}${RESET}`;\r\n help += `\\n${MAGENTA}${BOLD}[help]${RESET} ${GRAY}${parts[1]}${RESET}\\n`;\r\n } else {\r\n output += `\\n${color}${BOLD}${message}${RESET}\\n`;\r\n }\r\n\r\n const err = new Error();\r\n const stackLines = err.stack?.split('\\n') || [];\r\n\r\n // Buscamos la primera línea del stack fuera de node_modules\r\n const relevantStackLine = stackLines.find(line => \r\n line.includes('.js:') && !line.includes('node_modules')\r\n );\r\n\r\n if (relevantStackLine) {\r\n const match = relevantStackLine.match(/\\((.*):(\\d+):(\\d+)\\)/) || \r\n relevantStackLine.match(/at (.*):(\\d+):(\\d+)/);\r\n\r\n if (match) {\r\n const [, filePath, lineStr, columnStr] = match;\r\n const lineNum = parseInt(lineStr, 10);\r\n const errorLocation = `${filePath}:${lineStr}:${columnStr}`;\r\n\r\n // Leemos el archivo y sacamos las líneas relevantes\r\n try {\r\n const codeLines = fs.readFileSync(filePath, 'utf-8').split('\\n');\r\n const start = Math.max(0, lineNum - 3);\r\n const end = Math.min(codeLines.length, lineNum + 2);\r\n\r\n output += `\\n${CYAN}${BOLD}[code] ${RESET}${YELLOW} ${UNDERLINE}${errorLocation}${RESET}\\n`;\r\n\r\n for (let i = start; i < end; i++) {\r\n const line = codeLines[i];\r\n const lineLabel = `${i + 1}`.padStart(4, ' ');\r\n const pointer = i + 1 === lineNum ? 
`${RED}<-${RESET}` : ' ';\r\n output += `${GRAY}${lineLabel}${RESET} ${pointer} ${line}\\n`;\r\n }\r\n } catch (err) {\r\n output += `${YELLOW}⚠️ No se pudo leer el archivo de origen: ${filePath}${RESET}\\n`;\r\n output += `\\n${CYAN}${BOLD}Stack Trace:${RESET}\\n${stackLines.slice(2).join('\\n')}\\n`;\r\n }\r\n }\r\n } \r\n output += help;\r\n console.error(output);\r\n process.exit(1);\r\n}\r\n\r\nexport default Schema;\r\nexport { Schema };\r\n","import * as fs from 'fs';\r\nimport * as path from 'path';\r\n\r\nclass FileUtils {\r\n /**\r\n * Verifica si un archivo existe (asincrónico).\r\n * @param filePath - Ruta del archivo.\r\n * @returns True si el archivo existe, false si no.\r\n */\r\n static async fileExists(filePath: string): Promise<boolean> {\r\n return new Promise((resolve) => {\r\n fs.access(path.resolve(filePath), fs.constants.F_OK, (err) => {\r\n resolve(!err);\r\n });\r\n });\r\n }\r\n\r\n /**\r\n * Verifica si un archivo existe (sincrónico).\r\n * @param filePath - Ruta del archivo.\r\n * @returns True si el archivo existe, false si no.\r\n */\r\n static fileExistsSync(filePath: string): boolean {\r\n try {\r\n fs.accessSync(path.resolve(filePath), fs.constants.F_OK);\r\n return true;\r\n } catch {\r\n return false;\r\n }\r\n }\r\n\r\n static extractDatabaseName(input: string): string | null {\r\n const match = input.match(/@database\\([\"']?([\\w-]+)[\"']?\\)/);\r\n return match ? 
match[1] : null;\r\n }\r\n\r\n /**\r\n * Lee recursivamente archivos que terminan en un sufijo dado y los ordena numéricamente.\r\n * @param dir - Directorio base (relativo o absoluto).\r\n * @param suffix - Sufijo de archivo (como 'table.cube').\r\n * @returns Rutas absolutas de los archivos encontrados y ordenados.\r\n */\r\n static getCubeFilesRecursively(dir: string, suffix: string): string[] {\r\n const baseDir = path.resolve(dir); // ✅ Asegura que sea absoluto\r\n const cubeFiles: string[] = [];\r\n\r\n function recurse(currentDir: string): void {\r\n const entries = fs.readdirSync(currentDir, { withFileTypes: true });\r\n \r\n for (const entry of entries) {\r\n const fullPath = path.join(currentDir, entry.name);\r\n \r\n if (entry.isDirectory()) {\r\n recurse(fullPath);\r\n } else if (entry.isFile() && entry.name.endsWith(suffix)) {\r\n cubeFiles.push(fullPath); // Ya es absoluta\r\n }\r\n }\r\n }\r\n\r\n recurse(baseDir);\r\n\r\n // Ordenar por número si los archivos comienzan con un número\r\n cubeFiles.sort((a, b) => {\r\n const aNum = parseInt(path.basename(a));\r\n const bNum = parseInt(path.basename(b));\r\n return (isNaN(aNum) ? 0 : aNum) - (isNaN(bNum) ? 
0 : bNum);\r\n });\r\n\r\n return cubeFiles;\r\n }\r\n}\r\n\r\nexport default FileUtils;","import { Schema } from './lib/Schema';\r\n\r\nexport default Schema;\r\nexport { Schema };"],"mappings":";AAAA,OAAOA,SAAQ;AACf,SAAS,QAAQ,sBAAsB;AACvC,OAAOC,WAAU;;;ACFjB,YAAY,QAAQ;AACpB,YAAY,UAAU;AAEtB,IAAM,YAAN,MAAgB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAMd,aAAa,WAAW,UAAoC;AAC1D,WAAO,IAAI,QAAQ,CAACC,aAAY;AAC9B,MAAG,UAAY,aAAQ,QAAQ,GAAM,aAAU,MAAM,CAAC,QAAQ;AAC5D,QAAAA,SAAQ,CAAC,GAAG;AAAA,MACd,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,OAAO,eAAe,UAA2B;AAC/C,QAAI;AACF,MAAG,cAAgB,aAAQ,QAAQ,GAAM,aAAU,IAAI;AACvD,aAAO;AAAA,IACT,QAAQ;AACN,aAAO;AAAA,IACT;AAAA,EACF;AAAA,EAEA,OAAO,oBAAoB,OAA8B;AACvD,UAAM,QAAQ,MAAM,MAAM,iCAAiC;AAC3D,WAAO,QAAQ,MAAM,CAAC,IAAI;AAAA,EAC5B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,OAAO,wBAAwB,KAAa,QAA0B;AACpE,UAAM,UAAe,aAAQ,GAAG;AAChC,UAAM,YAAsB,CAAC;AAE7B,aAAS,QAAQ,YAA0B;AACzC,YAAM,UAAa,eAAY,YAAY,EAAE,eAAe,KAAK,CAAC;AAElE,iBAAW,SAAS,SAAS;AAC3B,cAAM,WAAgB,UAAK,YAAY,MAAM,IAAI;AAEjD,YAAI,MAAM,YAAY,GAAG;AACvB,kBAAQ,QAAQ;AAAA,QAClB,WAAW,MAAM,OAAO,KAAK,MAAM,KAAK,SAAS,MAAM,GAAG;AACxD,oBAAU,KAAK,QAAQ;AAAA,QACzB;AAAA,MACF;AAAA,IACF;AAEA,YAAQ,OAAO;AAGf,cAAU,KAAK,CAAC,GAAG,MAAM;AACvB,YAAM,OAAO,SAAc,cAAS,CAAC,CAAC;AACtC,YAAM,OAAO,SAAc,cAAS,CAAC,CAAC;AACtC,cAAQ,MAAM,IAAI,IAAI,IAAI,SAAS,MAAM,IAAI,IAAI,IAAI;AAAA,IACvD,CAAC;AAED,WAAO;AAAA,EACT;AACF;AAEA,IAAO,oBAAQ;;;ADhEf,IAAM,SAAN,MAAa;AAAA,EACD;AAAA,EACA;AAAA,EAER,YAAY,MAAc;AACtB,SAAK,OAAO;AACZ,UAAM,SAAS,IAAI,OAAO,IAAI;AAC9B,SAAK,SAAS;AAAA,EAClB;AAAA,EAEA,MAAM,iBAA+B;AACjC,UAAM,WAAWC,MAAK,QAAQ,QAAQ,IAAI,CAAC;AAC3C,UAAM,WAAY,MAAM,KAAK,OAAO,IAAI,iBAAgB;AAAA,MACpD;AAAA,MAAY;AAAA,MACZ;AAAA,MAAU;AAAA,IACd,CAAC;AACD,QAAG,SAAS,UAAQ,KAAI;AACpB,2BAAqB,SAAS,QAAQ,SAAS,OAAO;AAAA,IAC1D;AACA,WAAO,SAAS;AAAA,EACpB;AAAA,EAEA,MAAM,gBAA8B;AAChC,UAAM,WAAWA,MAAK,KAAK,QAAQ,IAAI,GAAG,UAAU,OAAO;AAG3D,QAAI,CAACC,IAAG,WAAW,QAAQ,GAAG;AAC1B,YAAM,IAAI,MAAM,wCAAmC;AAAA,IACvD;AAEA,UAAM,YAAY,kBAAU,wBAAwB,UAAU,YAAY;AAC1E,QAAI,UAAU,WAAW,GAAG;A
ACxB,YAAM,IAAI,MAAM,sCAAiC;AAAA,IACrD,OAAO;AACH,iBAAW,QAAQ,WAAW;AAC1B,cAAM,WAAWD,MAAK,WAAW,IAAI,IAAI,OAAOA,MAAK,KAAK,UAAU,IAAI;AACxE,cAAM,QAAQC,IAAG,SAAS,QAAQ;AAClC,YAAI,MAAM,OAAO,GAAG;AAChB,gBAAM,MAAO,MAAM,KAAK,OAAO,IAAI,iBAAgB;AAAA,YAC/C;AAAA,YAAY;AAAA,YACZ;AAAA,YAAU;AAAA,YACV;AAAA,YAAiB;AAAA,UACrB,CAAC;AACD,cAAG,IAAI,UAAQ,KAAI;AACf,iCAAqB,IAAI,QAAQ,IAAI,OAAO;AAAA,UAChD;AACA,gBAAM,YAAY,KAAK,UAAU,IAAI,KAAK,OAAO,EAAE,QAAQ,aAAa,EAAE,EAAE,QAAQ,YAAY,EAAE,EAAE,QAAQ,WAAW,GAAG;AAE1H,gBAAM,UAAW,MAAM,KAAK,OAAO,IAAI,iBAAgB;AAAA,YACnD;AAAA,YAAY;AAAA,YACZ;AAAA,YAAU;AAAA,YACV;AAAA,YAAS;AAAA,UACb,CAAC;AACD,cAAG,QAAQ,UAAQ,KAAI;AACnB,iCAAqB,QAAQ,QAAQ,QAAQ,OAAO;AAAA,UACxD;AACA,iBAAO,QAAQ,KAAK;AAEpB,gBAAM,mBAAmB,KAAK,UAAU,QAAQ,IAAI;AAEpD,gBAAM,WAAY,MAAM,KAAK,OAAO,IAAI,iBAAgB;AAAA,YACpD;AAAA,YAAY;AAAA,YACZ;AAAA,YAAU;AAAA,YACV;AAAA,YAAS;AAAA,UACb,CAAC;AAED,cAAG,SAAS,UAAQ,KAAI;AACpB,iCAAqB,SAAS,QAAQ,SAAS,OAAO;AAAA,UAC1D;AACA,gBAAM,cAAc,QAAQ,KAAK,gBAAgB,OAAO,CAAC,MAAa,EAAE,SAAS,QAAQ,CAAC,EAAE,CAAC;AAE7F,gBAAM,eAAe,UAAU,IAAI,KAAK,OAAO,IAAI,KAAK,UAAU,WAAW;AAE7E,iBAAO,SAAS;AAAA,QAEpB;AAAA,MACJ;AAAA,IACJ;AACA,WAAO;AAAA,EACX;AAAA,EAEA,MAAM,cAA4B;AAC9B,UAAM,WAAWD,MAAK,KAAK,QAAQ,IAAI,GAAG,UAAU,OAAO;AAG3D,QAAI,CAACC,IAAG,WAAW,QAAQ,GAAG;AAC1B,YAAM,IAAI,MAAM,wCAAmC;AAAA,IACvD;AAEA,UAAM,YAAY,kBAAU,wBAAwB,UAAU,YAAY;AAC1E,QAAI,UAAU,WAAW,GAAG;AACxB,YAAM,IAAI,MAAM,sCAAiC;AAAA,IACrD,OAAO;AACH,iBAAW,QAAQ,WAAW;AAC1B,cAAM,WAAWD,MAAK,WAAW,IAAI,IAAI,OAAOA,MAAK,KAAK,UAAU,IAAI;AACxE,cAAM,QAAQC,IAAG,SAAS,QAAQ;AAClC,YAAI,MAAM,OAAO,GAAG;AAChB,gBAAM,MAAO,MAAM,KAAK,OAAO,IAAI,iBAAgB;AAAA,YAC/C;AAAA,YAAY;AAAA,YACZ;AAAA,YAAiB;AAAA,YACjB;AAAA,YAAU;AAAA,UACd,CAAC;AACD,cAAG,IAAI,UAAQ,KAAI;AACf,iCAAqB,IAAI,QAAQ,IAAI,OAAO;AAAA,UAChD;AACA,gBAAM,YAAY,KAAK,UAAU,IAAI,KAAK,OAAO,EAAE,QAAQ,aAAa,EAAE,EAAE,QAAQ,YAAY,EAAE,EAAE,QAAQ,WAAW,GAAG;AAE1H,gBAAM,UAAW,MAAM,KAAK,OAAO,IAAI,iBAAgB;AAAA,YACnD;AAAA,YAAY;AAAA,YACZ;AAAA,YAAS;AAAA,UACb,CAAC;AACD,cAAG,QAAQ,UAAQ,KAAI;AACnB,iCAAqB,QAAQ,QAAQ,QAAQ,OAAO;AAAA,UACxD;AACA,iBAAO,QAAQ,KAAM;AAE
rB,gBAAM,cAAc,QAAQ,KAAK,gBAAgB,OAAO,CAAC,MAAa,EAAE,SAAS,QAAQ,CAAC,EAAE,CAAC;AAE7F,cAAG,QAAQ,KAAK,gBAAgB,SAAO,GAAE;AACrC,kBAAM,aAAa,MAAM,eAAe,qBAAqB,QAAQ,KAAK,gBAAgB,CAAC,GAAG,IAAI,KAAK,OAAO,IAAI,KAAK,OAAO,IAAI,KAAK,QAAQ;AAC/I,oBAAQ,KAAK,kBAAkB;AAAA,UACnC;AAEA,gBAAM,mBAAmB,KAAK,UAAU,QAAQ,IAAI;AAEpD,gBAAM,WAAY,MAAM,KAAK,OAAO,IAAI,iBAAgB;AAAA,YACpD;AAAA,YAAY;AAAA,YACZ;AAAA,YAAU;AAAA,YACV;AAAA,YAAS;AAAA,UACb,CAAC;AAED,cAAG,SAAS,UAAQ,KAAI;AACpB,iCAAqB,SAAS,QAAQ,SAAS,OAAO;AAAA,UAC1D;AAEA,gBAAM,eAAe,UAAU,IAAI,KAAK,OAAO,IAAI,KAAK,UAAU,WAAW;AAE7E,iBAAO,SAAS;AAAA,QAEpB;AAAA,MACJ;AAAA,IACJ;AACA,WAAO;AAAA,EACX;AAAA,EAEA,MAAM,iBAA+B;AACjC,UAAM,WAAWD,MAAK,KAAK,QAAQ,IAAI,GAAG,UAAU,OAAO;AAG3D,QAAI,CAACC,IAAG,WAAW,QAAQ,GAAG;AAC1B,YAAM,IAAI,MAAM,wCAAmC;AAAA,IACvD;AAEA,UAAM,YAAY,kBAAU,wBAAwB,UAAU,aAAa;AAE3E,QAAI,UAAU,WAAW,GAAG;AACxB,YAAM,IAAI,MAAM,sCAAiC;AAAA,IACrD,OAAO;AACH,iBAAW,QAAQ,WAAW;AAC1B,cAAM,WAAWD,MAAK,WAAW,IAAI,IAAI,OAAOA,MAAK,KAAK,UAAU,IAAI;AACxE,cAAM,QAAQC,IAAG,SAAS,QAAQ;AAElC,YAAI,MAAM,OAAO,GAAG;AAEhB,gBAAM,WAAY,MAAM,KAAK,OAAO,IAAI,iBAAgB;AAAA,YACpD;AAAA,YAAY;AAAA,YACZ;AAAA,YAAiB;AAAA,UACrB,CAAC;AAED,cAAG,SAAS,UAAQ,KAAI;AACpB,iCAAqB,SAAS,QAAQ,SAAS,OAAO;AAAA,UAC1D;AAEA,iBAAO,SAAS;AAAA,QAEpB;AAAA,MACJ;AAAA,IACJ;AACA,WAAO;AAAA,EACX;AAAA,EAEA,MAAM,kBAAgC;AAClC,UAAM,WAAWD,MAAK,KAAK,QAAQ,IAAI,GAAG,UAAU,OAAO;AAC3D,UAAM,kBAAkBA,MAAK,KAAK,QAAQ,IAAI,GAAG,UAAU,UAAU;AAGrE,QAAI,CAACC,IAAG,WAAW,QAAQ,GAAG;AAC1B,YAAM,IAAI,MAAM,wCAAmC;AAAA,IACvD;AAEA,UAAM,YAAY,kBAAU,wBAAwB,UAAU,cAAc;AAE5E,QAAI,UAAU,WAAW,GAAG;AACxB,YAAM,IAAI,MAAM,sCAAiC;AAAA,IACrD,OAAO;AACH,iBAAW,QAAQ,WAAW;AAC1B,cAAM,WAAWD,MAAK,WAAW,IAAI,IAAI,OAAOA,MAAK,KAAK,UAAU,IAAI;AACxE,cAAM,QAAQC,IAAG,SAAS,QAAQ;AAElC,YAAI,MAAM,OAAO,GAAG;AAEhB,gBAAM,WAAY,MAAM,KAAK,OAAO,IAAI,iBAAgB;AAAA,YACpD;AAAA,YAAY;AAAA,YACZ;AAAA,YAAe;AAAA,YACf;AAAA,YAAiB;AAAA,UACrB,CAAC;AAED,cAAG,SAAS,UAAQ,KAAI;AACpB,iCAAqB,SAAS,QAAQ,SAAS,OAAO;AAAA,UAC1D;AAEA,iBAAO,SAAS;AAAA,QAEpB;AAAA,MACJ;AAAA,IACJ;AACA,WAAO;AAAA,EACX;AACJ;AAGA,SAAS,qBAAqB,QAAgB,SAAiB;AAC3D,QAAM
,QAAQ;AACd,QAAM,MAAM;AACZ,QAAM,SAAS;AACf,QAAM,OAAO;AACb,QAAM,OAAO;AACb,QAAM,OAAO;AACb,QAAM,YAAY;AAClB,QAAM,UAAU;AAEhB,MAAI,SAAS;AACb,MAAI,OAAO;AACX,QAAM,QAAQ,WAAW,MAAM,SAAS;AAGxC,MAAI,QAAQ,SAAS,QAAQ,GAAG;AAC5B,UAAM,QAAQ,QAAQ,MAAM,QAAQ;AACpC,cAAU;AAAA,EAAK,GAAG,GAAG,IAAI,GAAG,MAAM,CAAC,CAAC,GAAG,KAAK;AAC5C,YAAQ;AAAA,EAAK,OAAO,GAAG,IAAI,SAAS,KAAK,IAAI,IAAI,GAAG,MAAM,CAAC,CAAC,GAAG,KAAK;AAAA;AAAA,EACxE,OAAO;AACH,cAAU;AAAA,EAAK,KAAK,GAAG,IAAI,GAAG,OAAO,GAAG,KAAK;AAAA;AAAA,EACjD;AAEA,QAAM,MAAM,IAAI,MAAM;AACtB,QAAM,aAAa,IAAI,OAAO,MAAM,IAAI,KAAK,CAAC;AAG9C,QAAM,oBAAoB,WAAW;AAAA,IAAK,UACtC,KAAK,SAAS,MAAM,KAAK,CAAC,KAAK,SAAS,cAAc;AAAA,EAC1D;AAEA,MAAI,mBAAmB;AACnB,UAAM,QAAQ,kBAAkB,MAAM,sBAAsB,KAC9C,kBAAkB,MAAM,qBAAqB;AAE3D,QAAI,OAAO;AACP,YAAM,CAAC,EAAE,UAAU,SAAS,SAAS,IAAI;AACzC,YAAM,UAAU,SAAS,SAAS,EAAE;AACpC,YAAM,gBAAgB,GAAG,QAAQ,IAAI,OAAO,IAAI,SAAS;AAGzD,UAAI;AACA,cAAM,YAAYA,IAAG,aAAa,UAAU,OAAO,EAAE,MAAM,IAAI;AAC/D,cAAM,QAAQ,KAAK,IAAI,GAAG,UAAU,CAAC;AACrC,cAAM,MAAM,KAAK,IAAI,UAAU,QAAQ,UAAU,CAAC;AAElD,kBAAU;AAAA,EAAK,IAAI,GAAG,IAAI,UAAU,KAAK,GAAG,MAAM,IAAI,SAAS,GAAG,aAAa,GAAG,KAAK;AAAA;AAEvF,iBAAS,IAAI,OAAO,IAAI,KAAK,KAAK;AAC9B,gBAAM,OAAO,UAAU,CAAC;AACxB,gBAAM,YAAY,GAAG,IAAI,CAAC,GAAG,SAAS,GAAG,GAAG;AAC5C,gBAAM,UAAU,IAAI,MAAM,UAAU,GAAG,GAAG,KAAK,KAAK,KAAK;AACzD,oBAAU,GAAG,IAAI,GAAG,SAAS,GAAG,KAAK,IAAI,OAAO,IAAI,IAAI;AAAA;AAAA,QAC5D;AAAA,MACJ,SAASC,MAAK;AACV,kBAAU,GAAG,MAAM,sDAA4C,QAAQ,GAAG,KAAK;AAAA;AAC/E,kBAAU;AAAA,EAAK,IAAI,GAAG,IAAI,eAAe,KAAK;AAAA,EAAK,WAAW,MAAM,CAAC,EAAE,KAAK,IAAI,CAAC;AAAA;AAAA,MACrF;AAAA,IACJ;AAAA,EACJ;AACA,YAAU;AACV,UAAQ,MAAM,MAAM;AACpB,UAAQ,KAAK,CAAC;AAClB;;;AEnSA,IAAO,gBAAQ;","names":["fs","path","resolve","path","fs","err"]}
|
package/package.json
ADDED
|
@@ -0,0 +1,69 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@dbcube/schema-builder",
|
|
3
|
+
"version": "0.0.1",
|
|
4
|
+
"description": "The DBCube Query Builder is a lightweight, flexible, and fluent library for building queries across multiple database engines, including MySQL, PostgreSQL, SQLite, and MongoDB, using JavaScript/Node.js. \nIts agnostic design allows you to generate data manipulation (DML) and data definition (DDL) operations with a clean, chainable syntax—without sacrificing power or expressiveness.\nIt’s designed to work seamlessly in both SQL and NoSQL environments, providing a consistent abstraction layer across different storage technologies while still leveraging the native capabilities of each engine.",
|
|
5
|
+
"main": "dist/index.cjs",
|
|
6
|
+
"module": "dist/index.js",
|
|
7
|
+
"types": "dist/index.d.ts",
|
|
8
|
+
"exports": {
|
|
9
|
+
"import": "./dist/index.js",
|
|
10
|
+
"require": "./dist/index.cjs"
|
|
11
|
+
},
|
|
12
|
+
"scripts": {
|
|
13
|
+
"build": "tsup"
|
|
14
|
+
},
|
|
15
|
+
"keywords": [
|
|
16
|
+
"dbcube",
|
|
17
|
+
"mysql",
|
|
18
|
+
"mongodb",
|
|
19
|
+
"postgresql",
|
|
20
|
+
"sqlite",
|
|
21
|
+
"database",
|
|
22
|
+
"db",
|
|
23
|
+
"orm",
|
|
24
|
+
"sql",
|
|
25
|
+
"query builder",
|
|
26
|
+
"query",
|
|
27
|
+
"builder",
|
|
28
|
+
"nodejs",
|
|
29
|
+
"backend",
|
|
30
|
+
"library",
|
|
31
|
+
"database library",
|
|
32
|
+
"base de datos",
|
|
33
|
+
"sequelize alternative",
|
|
34
|
+
"lightweight orm",
|
|
35
|
+
"sql toolkit",
|
|
36
|
+
"query optimizer",
|
|
37
|
+
"typescript",
|
|
38
|
+
"javascript",
|
|
39
|
+
"async",
|
|
40
|
+
"modern database",
|
|
41
|
+
"dbcube-orm",
|
|
42
|
+
"@dbcube/query-builder",
|
|
43
|
+
"query-builder",
|
|
44
|
+
"dbcube-query-builder",
|
|
45
|
+
"@dbcube/query-builder",
|
|
46
|
+
"albert",
|
|
47
|
+
"araya",
|
|
48
|
+
"@dbcube",
|
|
49
|
+
"@albrtaraya"
|
|
50
|
+
],
|
|
51
|
+
"author": "Albert Araya",
|
|
52
|
+
"license": "MIT",
|
|
53
|
+
"publishConfig": {
|
|
54
|
+
"access": "public"
|
|
55
|
+
},
|
|
56
|
+
"dependencies": {
|
|
57
|
+
"@dbcube/core": "file:../core"
|
|
58
|
+
},
|
|
59
|
+
"repository": {
|
|
60
|
+
"type": "git",
|
|
61
|
+
"url": "https://github.com/Dbcube/query-builder"
|
|
62
|
+
},
|
|
63
|
+
"devDependencies": {
|
|
64
|
+
"@types/node": "^22.16.2",
|
|
65
|
+
"rollup": "^4.36.0",
|
|
66
|
+
"tsup": "^8.4.0",
|
|
67
|
+
"typescript": "^5.7.2"
|
|
68
|
+
}
|
|
69
|
+
}
|
package/tsup.config.ts
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
import { defineConfig } from 'tsup';

// Build configuration: bundles src/index.ts into dist/ as both CommonJS
// (.cjs) and ES Module (.js) outputs, with type declarations and sourcemaps.
export default defineConfig({
  entry: ['src/index.ts'], // Entry point
  format: ['cjs', 'esm'], // Emit both CommonJS and ES Modules
  dts: true, // Emit type declaration files
  clean: true, // Clean the output directory before building
  outDir: 'dist', // Output directory
  splitting: false, // Disable code splitting
  sourcemap: true, // Emit sourcemaps
  outExtension: ({ format }) => ({ // Force custom file extensions per format
    js: format === 'cjs' ? '.cjs' : '.js',
  }),
});
|