inibase 1.1.20 → 1.1.22
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/file.d.ts +17 -14
- package/dist/file.js +44 -44
- package/dist/index.d.ts +8 -3
- package/dist/index.js +91 -87
- package/dist/utils.d.ts +3 -4
- package/dist/utils.js +37 -34
- package/dist/utils.server.d.ts +10 -13
- package/dist/utils.server.js +25 -37
- package/package.json +1 -1
package/dist/file.d.ts
CHANGED

@@ -1,4 +1,4 @@
-import type
+import { type ComparisonOperator, type Field } from "./index.js";
 export declare const lock: (folderPath: string, prefix?: string) => Promise<void>;
 export declare const unlock: (folderPath: string, prefix?: string) => Promise<void>;
 export declare const write: (filePath: string, data: any) => Promise<void>;
@@ -25,27 +25,30 @@ export declare const encode: (input: string | number | boolean | null | (string
  * Handles different formats of input, including strings, numbers, and their array representations.
  *
  * @param input - The input to be decoded, can be a string, number, or null.
- * @param
- * @param fieldChildrenType - Optional type for child elements in array inputs.
- * @param secretKey - Optional secret key for decoding, can be a string or Buffer.
+ * @param field - Field object config.
  * @returns Decoded value as a string, number, boolean, or array of these, or null if no fieldType or input is null/empty.
  */
-export declare const decode: (input: string | null | number,
+export declare const decode: (input: string | null | number, field: Field & {
+    databasePath?: string;
+}) => string | number | boolean | null | (string | number | null | boolean)[];
 /**
  * Asynchronously reads and decodes data from a file at specified line numbers.
  * Decodes each line based on specified field types and an optional secret key.
  *
  * @param filePath - Path of the file to be read.
  * @param lineNumbers - Optional line number(s) to read from the file. If -1, reads the last line.
- * @param
- * @param
- * @param secretKey - Optional secret key for decoding, can be a string or Buffer.
+ * @param field - Field object config.
+ * @param readWholeFile - Optional Flag to indicate whether to continue reading the file after reaching the limit.
  * @returns Promise resolving to a tuple:
  * 1. Record of line numbers and their decoded content or null if no lines are read.
  * 2. Total count of lines processed.
  */
-export declare function get(filePath: string, lineNumbers?: number | number[],
-
+export declare function get(filePath: string, lineNumbers?: number | number[], field?: Field & {
+    databasePath?: string;
+}, readWholeFile?: false): Promise<Record<number, string | number | boolean | null | (string | number | boolean | (string | number | boolean)[] | null)[]> | null>;
+export declare function get(filePath: string, lineNumbers: undefined | number | number[], field: undefined | (Field & {
+    databasePath?: string;
+}), readWholeFile: true): Promise<[
     Record<number, string | number | boolean | null | (string | number | boolean | (string | number | boolean)[] | null)[]> | null,
     number
 ]>;
@@ -95,19 +98,19 @@ export declare const remove: (filePath: string, linesToDelete: number | number[]
  * @param operator - Comparison operator(s) for evaluation (e.g., '=', '!=', '>', '<').
  * @param comparedAtValue - Value(s) to compare each line against.
  * @param logicalOperator - Optional logical operator ('and' or 'or') for combining multiple comparisons.
- * @param
- * @param fieldChildrenType - Optional type for child elements in array inputs.
+ * @param field - Field object config.
  * @param limit - Optional limit on the number of results to return.
  * @param offset - Optional offset to start returning results from.
  * @param readWholeFile - Flag to indicate whether to continue reading the file after reaching the limit.
- * @param secretKey - Optional secret key for decoding, can be a string or Buffer.
  * @returns Promise resolving to a tuple:
  * 1. Record of line numbers and their content that match the criteria or null if none.
  * 2. The count of found items or processed items based on the 'readWholeFile' flag.
  *
  * Note: Decodes each line for comparison and can handle complex queries with multiple conditions.
  */
-export declare const search: (filePath: string, operator: ComparisonOperator | ComparisonOperator[], comparedAtValue: string | number | boolean | null | (string | number | boolean | null)[], logicalOperator?: "and" | "or", searchIn?: Set<number>,
+export declare const search: (filePath: string, operator: ComparisonOperator | ComparisonOperator[], comparedAtValue: string | number | boolean | null | (string | number | boolean | null)[], logicalOperator?: "and" | "or", searchIn?: Set<number>, field?: Field & {
+    databasePath?: string;
+}, limit?: number, offset?: number, readWholeFile?: boolean) => Promise<[Record<number, string | number | boolean | null | (string | number | boolean | null)[]> | null, number, Set<number> | null]>;
 /**
  * Asynchronously calculates the sum of numerical values from specified lines in a file.
  *

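Taken together, these declaration changes replace the separate fieldType/fieldChildrenType/secretKey parameters with a single `field` object (a `Field` optionally carrying `databasePath`). A minimal TypeScript sketch of the new call shape, assuming the functions are imported directly from the built `dist/file.js`; the field name, database path, and file path below are hypothetical:

import { decode, get } from "./dist/file.js";

async function example() {
    // Hypothetical field config; `key`/`type` follow the Field shape used in this diff.
    const ageField = { key: "age", type: "number" as const, databasePath: "./db" };

    // decode() now takes the raw line plus the field object instead of fieldType/secretKey.
    const single = decode("42", ageField); // -> 42

    // get() takes the same field object; readWholeFile defaults to false, so a Record is returned.
    const rows = await get("./db/users/age.txt", [1, 3], ageField);
    return { single, rows };
}
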
package/dist/file.js
CHANGED

@@ -5,6 +5,7 @@ import { Transform } from "node:stream";
 import { pipeline } from "node:stream/promises";
 import { createGunzip, createGzip } from "node:zlib";
 import Inison from "inison";
+import { globalConfig, } from "./index.js";
 import { detectFieldType, isArrayOfObjects, isNumber, isObject, isStringified, } from "./utils.js";
 import { compare, encodeID, exec, gunzip, gzip } from "./utils.server.js";
 export const lock = async (folderPath, prefix) => {
@@ -131,34 +132,39 @@ const unSecureString = (input) => {
  * Handles different data types and structures, including nested arrays.
  *
  * @param value - The value to be decoded, can be string, number, or array.
- * @param
+ * @param field - Field object config.
  * @param fieldChildrenType - Optional type for children elements, used for arrays.
- * @param secretKey - Optional secret key for decoding, can be string or Buffer.
  * @returns Decoded value, transformed according to the specified field type(s).
  */
-const decodeHelper = (value,
-if (Array.isArray(value) &&
-return value.map((v) => decodeHelper(v,
-switch (
+const decodeHelper = (value, field) => {
+if (Array.isArray(value) && field.type !== "array")
+return value.map((v) => decodeHelper(v, field));
+switch (field.type) {
 case "number":
 return isNumber(value) ? Number(value) : null;
 case "boolean":
 return typeof value === "string" ? value === "true" : Boolean(value);
 case "array":
 if (!Array.isArray(value))
-
-if (
-return
-
-
-
-
+value = [value];
+if (field.children && !isArrayOfObjects(field.children))
+return value.map((v) => decode(v, {
+...field,
+type: Array.isArray(field.children)
+? detectFieldType(v, field.children)
+: field.children,
+}));
 break;
 case "table":
-
-
-
+return isNumber(value) &&
+(!field.table ||
+!field.databasePath ||
+!globalConfig[field.databasePath].tables.get(field.table).config
+.decodeID)
+? encodeID(value)
 : value;
+case "id":
+return isNumber(value) ? encodeID(value) : value;
 default:
 return value;
 }
@@ -168,25 +174,21 @@ const decodeHelper = (value, fieldType, fieldChildrenType, secretKey) => {
  * Handles different formats of input, including strings, numbers, and their array representations.
  *
  * @param input - The input to be decoded, can be a string, number, or null.
- * @param
- * @param fieldChildrenType - Optional type for child elements in array inputs.
- * @param secretKey - Optional secret key for decoding, can be a string or Buffer.
+ * @param field - Field object config.
  * @returns Decoded value as a string, number, boolean, or array of these, or null if no fieldType or input is null/empty.
  */
-export const decode = (input,
-if (!fieldType)
-return null;
+export const decode = (input, field) => {
 if (input === null || input === "")
 return undefined;
 // Detect the fieldType based on the input and the provided array of possible types.
-if (Array.isArray(
-
+if (Array.isArray(field.type))
+field.type = detectFieldType(String(input), field.type);
 // Decode the input using the decodeHelper function.
 return decodeHelper(typeof input === "string"
 ? isStringified(input)
 ? Inison.unstringify(input)
 : unSecureString(input)
-: input,
+: input, field);
 };
 function _groupIntoRanges(arr, action = "p") {
 if (arr.length === 0)
@@ -211,7 +213,7 @@ function _groupIntoRanges(arr, action = "p") {
 ranges.push(start === end ? `${start}` : `${start},${end}`);
 return ranges.map((range) => `${range}${action}`).join(";");
 }
-export async function get(filePath, lineNumbers,
+export async function get(filePath, lineNumbers, field, readWholeFile = false) {
 let fileHandle = null;
 try {
 fileHandle = await open(filePath, "r");
@@ -220,7 +222,7 @@ export async function get(filePath, lineNumbers, fieldType, fieldChildrenType, s
 if (!lineNumbers) {
 for await (const line of rl) {
 linesCount++;
-lines[linesCount] = decode(line,
+lines[linesCount] = decode(line, field);
 }
 }
 else if (lineNumbers == -1) {
@@ -229,7 +231,7 @@ export async function get(filePath, lineNumbers, fieldType, fieldChildrenType, s
 ? `zcat ${escapedFilePath} | sed -n '$p'`
 : `sed -n '$p' ${escapedFilePath}`, foundedLine = (await exec(command)).stdout.trimEnd();
 if (foundedLine)
-lines[linesCount] = decode(foundedLine,
+lines[linesCount] = decode(foundedLine, field);
 }
 else {
 lineNumbers = Array.isArray(lineNumbers) ? lineNumbers : [lineNumbers];
@@ -241,7 +243,7 @@ export async function get(filePath, lineNumbers, fieldType, fieldChildrenType, s
 linesCount++;
 if (!lineNumbersArray.has(linesCount))
 continue;
-lines[linesCount] = decode(line,
+lines[linesCount] = decode(line, field);
 lineNumbersArray.delete(linesCount);
 }
 return [lines, linesCount];
@@ -252,7 +254,7 @@ export async function get(filePath, lineNumbers, fieldType, fieldChildrenType, s
 : `sed -n '${_groupIntoRanges(lineNumbers)}' ${escapedFilePath}`, foundedLines = (await exec(command)).stdout.trimEnd().split("\n");
 let index = 0;
 for (const line of foundedLines) {
-lines[lineNumbers[index]] = decode(line,
+lines[lineNumbers[index]] = decode(line, field);
 index++;
 }
 }
@@ -504,19 +506,17 @@ export const remove = async (filePath, linesToDelete) => {
  * @param operator - Comparison operator(s) for evaluation (e.g., '=', '!=', '>', '<').
  * @param comparedAtValue - Value(s) to compare each line against.
  * @param logicalOperator - Optional logical operator ('and' or 'or') for combining multiple comparisons.
- * @param
- * @param fieldChildrenType - Optional type for child elements in array inputs.
+ * @param field - Field object config.
  * @param limit - Optional limit on the number of results to return.
  * @param offset - Optional offset to start returning results from.
  * @param readWholeFile - Flag to indicate whether to continue reading the file after reaching the limit.
- * @param secretKey - Optional secret key for decoding, can be a string or Buffer.
  * @returns Promise resolving to a tuple:
  * 1. Record of line numbers and their content that match the criteria or null if none.
  * 2. The count of found items or processed items based on the 'readWholeFile' flag.
  *
  * Note: Decodes each line for comparison and can handle complex queries with multiple conditions.
  */
-export const search = async (filePath, operator, comparedAtValue, logicalOperator, searchIn,
+export const search = async (filePath, operator, comparedAtValue, logicalOperator, searchIn, field, limit, offset, readWholeFile) => {
 // Initialize a Map to store the matching lines with their line numbers.
 const matchingLines = {};
 // Initialize counters for line number, found items, and processed items.
@@ -526,10 +526,10 @@ export const search = async (filePath, operator, comparedAtValue, logicalOperato
 const meetsConditions = (value) => (Array.isArray(operator) &&
 Array.isArray(comparedAtValue) &&
 ((logicalOperator === "or" &&
-operator.some((single_operator, index) => compare(single_operator, value, comparedAtValue[index],
-operator.every((single_operator, index) => compare(single_operator, value, comparedAtValue[index],
+operator.some((single_operator, index) => compare(single_operator, value, comparedAtValue[index], field.type))) ||
+operator.every((single_operator, index) => compare(single_operator, value, comparedAtValue[index], field.type)))) ||
 (!Array.isArray(operator) &&
-compare(operator, value, comparedAtValue,
+compare(operator, value, comparedAtValue, field.type));
 try {
 // Open the file for reading.
 fileHandle = await open(filePath, "r");
@@ -544,7 +544,7 @@ export const search = async (filePath, operator, comparedAtValue, logicalOperato
 (!searchIn.has(linesCount) || searchIn.has(-linesCount)))
 continue;
 // Decode the line for comparison.
-const decodedLine = decode(line,
+const decodedLine = decode(line, field);
 // Check if the line meets the specified conditions based on comparison and logical operators.
 const doesMeetCondition = (Array.isArray(decodedLine) &&
 decodedLine.flat().some(meetsConditions)) ||
@@ -600,7 +600,7 @@ export const sum = async (filePath, lineNumbers) => {
 linesCount++;
 if (!lineNumbersArray.has(linesCount))
 continue;
-sum += +(decode(line, "number") ?? 0);
+sum += +(decode(line, { key: "BLABLA", type: "number" }) ?? 0);
 lineNumbersArray.delete(linesCount);
 if (!lineNumbersArray.size)
 break;
@@ -608,7 +608,7 @@ export const sum = async (filePath, lineNumbers) => {
 }
 else
 for await (const line of rl)
-sum += +(decode(line, "number") ?? 0);
+sum += +(decode(line, { key: "BLABLA", type: "number" }) ?? 0);
 return sum;
 }
 finally {
@@ -636,7 +636,7 @@ export const max = async (filePath, lineNumbers) => {
 linesCount++;
 if (!lineNumbersArray.has(linesCount))
 continue;
-const lineContentNum = +(decode(line, "number") ?? 0);
+const lineContentNum = +(decode(line, { key: "BLABLA", type: "number" }) ?? 0);
 if (lineContentNum > max)
 max = lineContentNum;
 lineNumbersArray.delete(linesCount);
@@ -646,7 +646,7 @@ export const max = async (filePath, lineNumbers) => {
 }
 else
 for await (const line of rl) {
-const lineContentNum = +(decode(line, "number") ?? 0);
+const lineContentNum = +(decode(line, { key: "BLABLA", type: "number" }) ?? 0);
 if (lineContentNum > max)
 max = lineContentNum;
 }
@@ -677,7 +677,7 @@ export const min = async (filePath, lineNumbers) => {
 linesCount++;
 if (!lineNumbersArray.has(linesCount))
 continue;
-const lineContentNum = +(decode(line, "number") ?? 0);
+const lineContentNum = +(decode(line, { key: "BLABLA", type: "number" }) ?? 0);
 if (lineContentNum < min)
 min = lineContentNum;
 lineNumbersArray.delete(linesCount);
@@ -687,7 +687,7 @@ export const min = async (filePath, lineNumbers) => {
 }
 else
 for await (const line of rl) {
-const lineContentNum = +(decode(line, "number") ?? 0);
+const lineContentNum = +(decode(line, { key: "BLABLA", type: "number" }) ?? 0);
 if (lineContentNum < min)
 min = lineContentNum;
 }

package/dist/index.d.ts
CHANGED

@@ -53,13 +53,18 @@ declare global {
 }
 export type ErrorCodes = "GROUP_UNIQUE" | "FIELD_UNIQUE" | "FIELD_REQUIRED" | "NO_SCHEMA" | "TABLE_EMPTY" | "INVALID_ID" | "INVALID_TYPE" | "INVALID_PARAMETERS" | "NO_ENV" | "TABLE_EXISTS" | "TABLE_NOT_EXISTS" | "INVALID_REGEX_MATCH";
 export type ErrorLang = "en" | "ar" | "fr" | "es";
+export declare const globalConfig: {
+    [database: string]: {
+        tables?: Map<string, TableObject>;
+    };
+} & {
+    salt?: string | Buffer;
+};
 export default class Inibase {
 pageInfo: Record<string, pageInfo>;
 language: ErrorLang;
-
+fileExtension: string;
 private databasePath;
-private fileExtension;
-private tablesMap;
 private uniqueMap;
 private totalItems;
 constructor(database: string, mainFolder?: string, language?: ErrorLang);

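The new `globalConfig` export moves the per-instance `salt` and `tablesMap` state into module-level configuration keyed by database path. A rough TypeScript sketch of how that could look from the outside, assuming the package's main entry re-exports `dist/index.js`; the database name below is hypothetical and the exact per-database key is internal:

import Inibase, { globalConfig } from "inibase";

// Constructing an instance loads INIBASE_SECRET from .env (or generates one) into the shared salt.
const db = new Inibase("myDatabase");
console.log(Buffer.isBuffer(globalConfig.salt)); // true once the constructor has run

// Per-table schema and config are cached under globalConfig[databasePath].tables after getTable(),
// which is where the rest of this diff reads `compression`, `cache`, `prepend`, and `decodeID`.
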
package/dist/index.js
CHANGED

@@ -10,13 +10,12 @@ import * as Utils from "./utils.js";
 import * as UtilsServer from "./utils.server.js";
 // hide ExperimentalWarning glob()
 process.removeAllListeners("warning");
+export const globalConfig = {};
 export default class Inibase {
 pageInfo;
 language;
-salt;
-databasePath;
 fileExtension = ".txt";
-
+databasePath;
 uniqueMap;
 totalItems;
 constructor(database, mainFolder = ".", language = "en") {
@@ -27,11 +26,11 @@ export default class Inibase {
 if (existsSync(".env") &&
 readFileSync(".env").includes("INIBASE_SECRET="))
 throw this.createError("NO_ENV");
-
-appendFileSync(".env", `\nINIBASE_SECRET=${
+globalConfig.salt = scryptSync(randomBytes(16), randomBytes(16), 32);
+appendFileSync(".env", `\nINIBASE_SECRET=${globalConfig.salt.toString("hex")}\n`);
 }
 else
-
+globalConfig.salt = Buffer.from(process.env.INIBASE_SECRET, "hex");
 }
 static errorMessages = {
 en: {
@@ -112,7 +111,7 @@
 return error;
 }
 clear() {
-this.
+globalConfig[this.databasePath] = { tables: new Map() };
 this.totalItems = new Map();
 this.pageInfo = {};
 this.uniqueMap = new Map();
@@ -120,9 +119,9 @@
 getFileExtension(tableName) {
 let mainExtension = this.fileExtension;
 // TODO: ADD ENCRYPTION
-// if(this.
+// if(globalConfig[this.databasePath].tables.get(tableName).config.encryption)
 // mainExtension += ".enc"
-if (this.
+if (globalConfig[this.databasePath].tables.get(tableName).config.compression)
 mainExtension += ".gz";
 return mainExtension;
 }
@@ -134,9 +133,7 @@
 Utils.isArrayOfObjects(field.children))
 Utils.deepMerge(RETURN, this._schemaToIdsPath(tableName, field.children, `${(prefix ?? "") + field.key}.`));
 else if (field.id)
-RETURN[Utils.isValidID(field.id)
-? UtilsServer.decodeID(field.id, this.salt)
-: field.id] = `${(prefix ?? "") + field.key}${this.getFileExtension(tableName)}`;
+RETURN[Utils.isValidID(field.id) ? UtilsServer.decodeID(field.id) : field.id] = `${(prefix ?? "") + field.key}${this.getFileExtension(tableName)}`;
 return RETURN;
 }
 /**
@@ -170,7 +167,7 @@
 }
 if (schema) {
 const lastSchemaID = { value: 0 };
-await writeFile(join(tablePath, "schema.json"), JSON.stringify(UtilsServer.addIdToSchema(schema, lastSchemaID
+await writeFile(join(tablePath, "schema.json"), JSON.stringify(UtilsServer.addIdToSchema(schema, lastSchemaID), null, 2));
 await writeFile(join(tablePath, `${lastSchemaID.value}.schema`), "");
 }
 else
@@ -206,7 +203,7 @@
 };
 if (await File.isExists(join(tablePath, "schema.json"))) {
 // update columns files names based on field id
-schema = UtilsServer.addIdToSchema(schema, lastSchemaID
+schema = UtilsServer.addIdToSchema(schema, lastSchemaID);
 if (table.schema?.length) {
 const replaceOldPathes = Utils.findChangedProperties(this._schemaToIdsPath(tableName, table.schema), this._schemaToIdsPath(tableName, schema));
 if (replaceOldPathes)
@@ -217,7 +214,7 @@
 }
 }
 else
-schema = UtilsServer.addIdToSchema(schema, lastSchemaID
+schema = UtilsServer.addIdToSchema(schema, lastSchemaID);
 await writeFile(join(tablePath, "schema.json"), JSON.stringify(schema, null, 2));
 if (schemaIdFilePath)
 await rename(schemaIdFilePath, join(tablePath, `${lastSchemaID.value}.schema`));
@@ -291,7 +288,7 @@
 await this.replaceStringInFile(schemaPath, `"table": "${tableName}"`, `"table": "${config.name}"`);
 }
 }
-this.
+globalConfig[this.databasePath].tables.delete(tableName);
 }
 /**
  * Get table schema and config
@@ -303,8 +300,8 @@
 const tablePath = join(this.databasePath, tableName);
 if (!(await File.isExists(tablePath)))
 throw this.createError("TABLE_NOT_EXISTS", tableName);
-if (!this.
-this.
+if (!globalConfig[this.databasePath].tables.has(tableName))
+globalConfig[this.databasePath].tables.set(tableName, {
 schema: await this.getTableSchema(tableName, encodeIDs),
 config: {
 compression: await File.isExists(join(tablePath, ".compression.config")),
@@ -313,7 +310,7 @@
 decodeID: await File.isExists(join(tablePath, ".decodeID.config")),
 },
 });
-return this.
+return globalConfig[this.databasePath].tables.get(tableName);
 }
 async getTableSchema(tableName, encodeIDs = true) {
 const tablePath = join(this.databasePath, tableName);
@@ -345,7 +342,7 @@
 ];
 if (!encodeIDs)
 return schema;
-return UtilsServer.encodeSchemaID(schema
+return UtilsServer.encodeSchemaID(schema);
 }
 async throwErrorIfTableEmpty(tableName) {
 const table = await this.getTable(tableName, false);
@@ -370,11 +367,7 @@
 throw this.createError("FIELD_REQUIRED", field.key);
 continue;
 }
-if (!Utils.validateFieldType(data[field.key], field
-field.children &&
-!Utils.isArrayOfObjects(field.children)
-? field.children
-: undefined))
+if (!Utils.validateFieldType(data[field.key], field))
 throw this.createError("INVALID_TYPE", [
 field.key,
 (Array.isArray(field.type) ? field.type.join(", ") : field.type) +
@@ -432,7 +425,7 @@
 async validateData(tableName, data, skipRequiredField = false) {
 const clonedData = structuredClone(data);
 // Skip ID and (created|updated)At
-this._validateData(clonedData, this.
+this._validateData(clonedData, globalConfig[this.databasePath].tables.get(tableName).schema.slice(1, -2), skipRequiredField);
 await this.checkUnique(tableName);
 }
 cleanObject(obj) {
@@ -473,12 +466,12 @@
 Utils.isNumber(value.id)))
 return Utils.isNumber(value.id)
 ? Number(value.id)
-: UtilsServer.decodeID(value.id
+: UtilsServer.decodeID(value.id);
 }
 else if (Utils.isValidID(value) || Utils.isNumber(value))
 return Utils.isNumber(value)
 ? Number(value)
-: UtilsServer.decodeID(value
+: UtilsServer.decodeID(value);
 break;
 case "password":
 return Utils.isPassword(value)
@@ -493,7 +486,7 @@
 case "id":
 return Utils.isNumber(value)
 ? value
-: UtilsServer.decodeID(value
+: UtilsServer.decodeID(value);
 case "json": {
 if (typeof value === "string" && Utils.isStringified(value))
 return value;
@@ -513,7 +506,7 @@
 }
 async checkUnique(tableName) {
 const tablePath = join(this.databasePath, tableName);
-const flattenSchema = Utils.flattenSchema(this.
+const flattenSchema = Utils.flattenSchema(globalConfig[this.databasePath].tables.get(tableName).schema);
 function hasDuplicates(setA, setB) {
 for (const value of setA)
 if (setB.has(value))
@@ -529,7 +522,7 @@
 index++;
 const field = flattenSchema.find(({ id }) => id === columnID);
 fieldsKeys.push(field.key);
-const [_, totalLines, lineNumbers] = await File.search(join(tablePath, `${field.key}${this.getFileExtension(tableName)}`), "[]", Array.from(values), undefined, valueObject.exclude, field
+const [_, totalLines, lineNumbers] = await File.search(join(tablePath, `${field.key}${this.getFileExtension(tableName)}`), "[]", Array.from(values), undefined, valueObject.exclude, { ...field, databasePath: this.databasePath }, 1, undefined, false);
 if (totalLines > 0) {
 if (valueObject.columnsValues.size === 1 ||
 (valueObject.columnsValues.size === index &&
@@ -727,9 +720,14 @@
 async processSimpleField(tableName, field, linesNumber, RETURN, _options, prefix) {
 const fieldPath = join(this.databasePath, tableName, `${prefix ?? ""}${field.key}${this.getFileExtension(tableName)}`);
 if (await File.isExists(fieldPath)) {
-const items = await File.get(fieldPath, linesNumber,
-
-: field.
+const items = await File.get(fieldPath, linesNumber, {
+...field,
+type: field.key === "id" &&
+globalConfig[this.databasePath].tables.get(tableName).config.decodeID
+? "number"
+: field.type,
+databasePath: this.databasePath,
+});
 if (items) {
 for (const [index, item] of Object.entries(items)) {
 if (typeof item === "undefined")
@@ -875,7 +873,10 @@
 (await File.isExists(join(this.databasePath, field.table)))) {
 const fieldPath = join(this.databasePath, tableName, `${prefix ?? ""}${field.key}${this.getFileExtension(tableName)}`);
 if (await File.isExists(fieldPath)) {
-const itemsIDs = await File.get(fieldPath, linesNumber,
+const itemsIDs = (await File.get(fieldPath, linesNumber, {
+...field,
+databasePath: this.databasePath,
+}));
 const isArrayField = this.isArrayField(field.type);
 if (itemsIDs) {
 const searchableIDs = new Map();
@@ -888,26 +889,29 @@
 searchableIDs.set(lineNumber, lineContent);
 }
 if (searchableIDs.size) {
-const items = await this.get(field.table, isArrayField
-? Array.from(
-:
+const items = await this.get(field.table, Array.from(new Set(isArrayField
+? Array.from(searchableIDs.values()).flat()
+: searchableIDs.values())).flat(), {
 ...options,
 perPage: Number.POSITIVE_INFINITY,
 columns: options.columns
 ?.filter((column) => column.includes(`${field.key}.`))
 .map((column) => column.replace(`${field.key}.`, "")),
 });
-
-
-
-?
-?
-:
-
-
-
-
-
+for (const [lineNumber, lineContent] of searchableIDs.entries())
+RETURN[lineNumber][field.key] = isArrayField
+? Utils.isArrayOfArrays(lineContent)
+? lineContent.map((item) => items
+? items.filter(({ id }) => item.includes(id))
+: {
+id: item,
+})
+: lineContent.flatMap((item) => items
+? items.find(({ id }) => item === id)
+: { id: item })
+: (items?.find(({ id }) => id === lineContent) ?? {
+id: lineContent,
+});
 }
 }
 }
@@ -950,7 +954,7 @@
 criteria = Utils.toDotNotation(criteria, ["or", "and"]);
 let index = -1;
 for await (const [key, value] of Object.entries(criteria)) {
-const field = Utils.getField(key, this.
+const field = Utils.getField(key, globalConfig[this.databasePath].tables.get(tableName).schema);
 index++;
 let searchOperator = undefined;
 let searchComparedAtValue = undefined;
@@ -1008,9 +1012,7 @@
 searchOperator = "=";
 searchComparedAtValue = value;
 }
-const [searchResult, totalLines, linesNumbers] = await File.search(join(tablePath, `${key}${this.getFileExtension(tableName)}`), searchOperator ?? "=", searchComparedAtValue ?? null, searchLogicalOperator, allTrue ? searchIn : undefined, field
-? undefined
-: this.salt);
+const [searchResult, totalLines, linesNumbers] = await File.search(join(tablePath, `${key}${this.getFileExtension(tableName)}`), searchOperator ?? "=", searchComparedAtValue ?? null, searchLogicalOperator, allTrue ? searchIn : undefined, { ...field, databasePath: this.databasePath }, options.perPage, (options.page - 1) * options.perPage + 1, true);
 if (searchResult) {
 const formatedSearchResult = Object.fromEntries(Object.entries(searchResult).map(([id, value]) => {
 const nestedObj = {};
@@ -1128,13 +1130,13 @@
 .map((column) => [column, true]);
 let cacheKey = "";
 // Criteria
-if (this.
+if (globalConfig[this.databasePath].tables.get(tableName).config.cache)
 cacheKey = UtilsServer.hashString(inspect(sortArray, { sorted: true }));
 if (where) {
 const lineNumbers = await this.get(tableName, where, undefined, undefined, true);
 if (!lineNumbers?.length)
 return null;
-const itemsIDs = Object.values((await File.get(join(tablePath, `id${this.getFileExtension(tableName)}`), lineNumbers, "
+const itemsIDs = Object.values((await File.get(join(tablePath, `id${this.getFileExtension(tableName)}`), lineNumbers, { key: "BLABLA", type: "number" })) ?? {}).map(Number);
 awkCommand = `awk '${itemsIDs.map((id) => `$1 == ${id}`).join(" || ")}'`;
 }
 else
@@ -1157,9 +1159,7 @@
 .map(([key, ascending], i) => {
 const field = Utils.getField(key, schema);
 if (field)
-return `-k${i + index},${i + index}${Utils.isFieldType(["id", "number", "date"]
-? "n"
-: ""}${!ascending ? "r" : ""}`;
+return `-k${i + index},${i + index}${Utils.isFieldType(field, ["id", "number", "date"]) ? "n" : ""}${!ascending ? "r" : ""}`;
 return "";
 })
 .join(" ");
@@ -1168,7 +1168,7 @@
 if (cacheKey)
 await File.lock(join(tablePath, ".tmp"), cacheKey);
 // Combine && Execute the commands synchronously
-let lines = (await UtilsServer.exec(this.
+let lines = (await UtilsServer.exec(globalConfig[this.databasePath].tables.get(tableName).config.cache
 ? (await File.isExists(join(tablePath, ".cache", `${cacheKey}${this.fileExtension}`)))
 ? `${awkCommand} '${join(tablePath, ".cache", `${cacheKey}${this.fileExtension}`)}'`
 : `${pasteCommand} | ${sortCommand} -o '${join(tablePath, ".cache", `${cacheKey}${this.fileExtension}`)}' && ${awkCommand} '${join(tablePath, ".cache", `${cacheKey}${this.fileExtension}`)}'`
@@ -1192,10 +1192,11 @@
 const field = Utils.getField(parse(fileName).name, schema);
 if (field) {
 if (field.key === "id" &&
-this.
+globalConfig[this.databasePath].tables.get(tableName).config
+.decodeID)
 outputObject[field.key] = splitedFileColumns[index];
 else
-outputObject[field.key] = File.decode(splitedFileColumns[index],
+outputObject[field.key] = File.decode(splitedFileColumns[index], { ...field, databasePath: this.databasePath });
 }
 });
 return outputObject;
@@ -1223,7 +1224,8 @@
 }
 else if (((Array.isArray(where) && where.every(Utils.isNumber)) ||
 Utils.isNumber(where)) &&
-(_whereIsLinesNumbers ||
+(_whereIsLinesNumbers ||
+!globalConfig[this.databasePath].tables.get(tableName).config.decodeID)) {
 // "where" in this case, is the line(s) number(s) and not id(s)
 let lineNumbers = where;
 if (!Array.isArray(lineNumbers))
@@ -1238,7 +1240,7 @@
 RETURN = RETURN[0];
 }
 else if ((!_whereIsLinesNumbers &&
-this.
+globalConfig[this.databasePath].tables.get(tableName).config.decodeID &&
 ((Array.isArray(where) && where.every(Utils.isNumber)) ||
 Utils.isNumber(where))) ||
 (Array.isArray(where) && where.every(Utils.isValidID)) ||
@@ -1246,7 +1248,7 @@
 let Ids = where;
 if (!Array.isArray(Ids))
 Ids = [Ids];
-const [lineNumbers, countItems] = await File.search(join(tablePath, `id${this.getFileExtension(tableName)}`), "[]", Ids.map((id) => Utils.isNumber(id) ? Number(id) : UtilsServer.decodeID(id
+const [lineNumbers, countItems] = await File.search(join(tablePath, `id${this.getFileExtension(tableName)}`), "[]", Ids.map((id) => Utils.isNumber(id) ? Number(id) : UtilsServer.decodeID(id)), undefined, undefined, { key: "BLABLA", type: "number" }, Ids.length, 0, !this.totalItems.has(`${tableName}-*`));
 if (!lineNumbers)
 return null;
 if (!this.totalItems.has(`${tableName}-*`))
@@ -1267,7 +1269,7 @@
 else if (Utils.isObject(where)) {
 let cachedFilePath = "";
 // Criteria
-if (this.
+if (globalConfig[this.databasePath].tables.get(tableName).config.cache) {
 cachedFilePath = join(tablePath, ".cache", `${UtilsServer.hashString(inspect(where, { sorted: true }))}${this.fileExtension}`);
 if (await File.isExists(cachedFilePath)) {
 const cachedItems = (await readFile(cachedFilePath, "utf8")).split(",");
@@ -1292,9 +1294,9 @@
 const alreadyExistsColumnsIDs = Utils.flattenSchema(schema)
 .filter(({ key }) => alreadyExistsColumns.includes(key))
 .map(({ id }) => id);
-RETURN = Object.values(Utils.deepMerge(LineNumberDataMap, await this.processSchemaData(tableName, Utils.filterSchema(schema, (
-Utils.isFieldType("table"
-if (this.
+RETURN = Object.values(Utils.deepMerge(LineNumberDataMap, await this.processSchemaData(tableName, Utils.filterSchema(schema, (field) => !alreadyExistsColumnsIDs.includes(field.id) ||
+Utils.isFieldType(field, "table")), Object.keys(LineNumberDataMap).map(Number), options)));
+if (globalConfig[this.databasePath].tables.get(tableName).config.cache)
 await writeFile(cachedFilePath, Array.from(linesNumbers).join(","));
 }
 }
@@ -1323,7 +1325,7 @@
 };
 const tablePath = join(this.databasePath, tableName);
 await this.getTable(tableName);
-if (!this.
+if (!globalConfig[this.databasePath].tables.get(tableName).schema)
 throw this.createError("NO_SCHEMA", tableName);
 if (!returnPostedData)
 returnPostedData = false;
@@ -1352,25 +1354,25 @@
 clonedData.createdAt = Date.now();
 clonedData.updatedAt = undefined;
 }
-clonedData = this.formatData(clonedData, this.
-const pathesContents = this.joinPathesContents(tableName, this.
+clonedData = this.formatData(clonedData, globalConfig[this.databasePath].tables.get(tableName).schema, false);
+const pathesContents = this.joinPathesContents(tableName, globalConfig[this.databasePath].tables.get(tableName).config.prepend
 ? Array.isArray(clonedData)
 ? clonedData.toReversed()
 : clonedData
 : clonedData);
-await Promise.allSettled(Object.entries(pathesContents).map(async ([path, content]) => renameList.push(this.
+await Promise.allSettled(Object.entries(pathesContents).map(async ([path, content]) => renameList.push(globalConfig[this.databasePath].tables.get(tableName).config.prepend
 ? await File.prepend(path, content)
 : await File.append(path, content))));
 await Promise.allSettled(renameList
 .filter(([_, filePath]) => filePath)
 .map(async ([tempPath, filePath]) => rename(tempPath, filePath)));
-if (this.
+if (globalConfig[this.databasePath].tables.get(tableName).config.cache)
 await this.clearCache(tableName);
 const currentValue = this.totalItems.get(`${tableName}-*`) || 0;
 this.totalItems.set(`${tableName}-*`, currentValue + (Array.isArray(data) ? data.length : 1));
 await rename(paginationFilePath, join(tablePath, `${lastId}-${this.totalItems.get(`${tableName}-*`)}.pagination`));
 if (returnPostedData)
-return this.get(tableName, this.
+return this.get(tableName, globalConfig[this.databasePath].tables.get(tableName).config.prepend
 ? Array.isArray(clonedData)
 ? clonedData.map((_, index) => index + 1).toReversed()
 : 1
@@ -1381,10 +1383,10 @@
 : this.totalItems.get(`${tableName}-*`), options, !Utils.isArrayOfObjects(clonedData), // return only one item if data is not array of objects
 undefined, true);
 return Array.isArray(clonedData)
-? (this.
+? (globalConfig[this.databasePath].tables.get(tableName).config.prepend
 ? clonedData.toReversed()
-: clonedData).map(({ id }) => UtilsServer.encodeID(id
-: UtilsServer.encodeID(clonedData.id
+: clonedData).map(({ id }) => UtilsServer.encodeID(id))
+: UtilsServer.encodeID(clonedData.id);
 }
 finally {
 if (renameList.length)
@@ -1412,7 +1414,7 @@
 return this.put(tableName, clonedData, clonedData.id, options, returnUpdatedData);
 }
 await this.validateData(tableName, clonedData, true);
-clonedData = this.formatData(clonedData, this.
+clonedData = this.formatData(clonedData, globalConfig[this.databasePath].tables.get(tableName).schema, true);
 const pathesContents = this.joinPathesContents(tableName, {
 ...(({ id, ...restOfData }) => restOfData)(clonedData),
 updatedAt: Date.now(),
@@ -1427,7 +1429,7 @@
 await Promise.allSettled(renameList
 .filter(([_, filePath]) => filePath)
 .map(async ([tempPath, filePath]) => rename(tempPath, filePath)));
-if (this.
+if (globalConfig[this.databasePath].tables.get(tableName).config.cache)
 await this.clearCache(join(tablePath, ".cache"));
 if (returnUpdatedData)
 return await this.get(tableName, undefined, options);
@@ -1440,10 +1442,11 @@
 }
 else if (((Array.isArray(where) && where.every(Utils.isNumber)) ||
 Utils.isNumber(where)) &&
-(_whereIsLinesNumbers ||
+(_whereIsLinesNumbers ||
+!globalConfig[this.databasePath].tables.get(tableName).config.decodeID)) {
 // "where" in this case, is the line(s) number(s) and not id(s)
 await this.validateData(tableName, clonedData, true);
-clonedData = this.formatData(clonedData, this.
+clonedData = this.formatData(clonedData, globalConfig[this.databasePath].tables.get(tableName).schema, true);
 const pathesContents = Object.fromEntries(Object.entries(this.joinPathesContents(tableName, Array.isArray(clonedData)
 ? clonedData.map((item) => ({
 ...item,
@@ -1465,7 +1468,7 @@
 await Promise.allSettled(renameList
 .filter(([_, filePath]) => filePath)
 .map(async ([tempPath, filePath]) => rename(tempPath, filePath)));
-if (this.
+if (globalConfig[this.databasePath].tables.get(tableName).config.cache)
 await this.clearCache(tableName);
 if (returnUpdatedData)
 return this.get(tableName, where, options, !Array.isArray(where), undefined, true);
@@ -1477,7 +1480,7 @@
 }
 }
 else if ((!_whereIsLinesNumbers &&
-this.
+globalConfig[this.databasePath].tables.get(tableName).config.decodeID &&
 ((Array.isArray(where) && where.every(Utils.isNumber)) ||
 Utils.isNumber(where))) ||
 (Array.isArray(where) && where.every(Utils.isValidID)) ||
@@ -1520,7 +1523,7 @@
 await Promise.all((await readdir(tablePath))
 ?.filter((fileName) => fileName.endsWith(this.getFileExtension(tableName)))
 .map(async (file) => unlink(join(tablePath, file))));
-if (this.
+if (globalConfig[this.databasePath].tables.get(tableName).config.cache)
 await this.clearCache(tableName);
 await rename(paginationFilePath, join(tablePath, `${pagination[0]}-0.pagination`));
 return true;
@@ -1531,7 +1534,8 @@
 }
 if (((Array.isArray(where) && where.every(Utils.isNumber)) ||
 Utils.isNumber(where)) &&
-(_whereIsLinesNumbers ||
+(_whereIsLinesNumbers ||
+!globalConfig[this.databasePath].tables.get(tableName).config.decodeID)) {
 // "where" in this case, is the line(s) number(s) and not id(s)
 const files = (await readdir(tablePath))?.filter((fileName) => fileName.endsWith(this.getFileExtension(tableName)));
 if (files.length) {
@@ -1559,7 +1563,7 @@
 await Promise.all((await readdir(tablePath))
 ?.filter((fileName) => fileName.endsWith(this.getFileExtension(tableName)))
 .map(async (file) => unlink(join(tablePath, file))));
-if (this.
+if (globalConfig[this.databasePath].tables.get(tableName).config.cache)
 await this.clearCache(tableName);
 await rename(paginationFilePath, join(tablePath, `${pagination[0]}-${pagination[1] - (Array.isArray(where) ? where.length : 1)}.pagination`));
 return true;
@@ -1572,7 +1576,7 @@
 }
 }
 if ((!_whereIsLinesNumbers &&
-this.
+globalConfig[this.databasePath].tables.get(tableName).config.decodeID &&
 ((Array.isArray(where) && where.every(Utils.isNumber)) ||
 Utils.isNumber(where))) ||
 (Array.isArray(where) && where.every(Utils.isValidID)) ||

package/dist/utils.d.ts
CHANGED

@@ -156,18 +156,17 @@ export declare const findChangedProperties: (obj1: Record<string, string>, obj2:
  * @returns The detected field type as a string, or undefined if no matching type is found.
  */
 export declare const detectFieldType: (input: any, availableTypes: FieldType[]) => FieldType | undefined;
-export declare const isFieldType: (compareAtType: string | string[]
+export declare const isFieldType: (field: Field, compareAtType: string | string[]) => boolean;
 export declare const flattenSchema: (schema: Schema, keepParents?: boolean) => Schema;
 export declare const filterSchema: (schema: Schema, callback: (arg0: Field) => boolean) => Field[];
 /**
  * Validates if the given value matches the specified field type(s).
  *
  * @param value - The value to be validated.
- * @param
- * @param fieldChildrenType - Optional; the expected type(s) of children elements, used if the field type is an array.
+ * @param field - Field object config.
  * @returns A boolean indicating whether the value matches the specified field type(s).
  */
-export declare const validateFieldType: (value: any,
+export declare const validateFieldType: (value: any, field: Field) => boolean;
 export declare const FormatObjectCriteriaValue: (value: string) => [ComparisonOperator, string | number | boolean | null | (string | number | null)[]];
 /**
  * Get field from schema

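`isFieldType` and `validateFieldType` now receive the whole `Field` (first and second argument respectively) instead of loose fieldType/fieldChildrenType values. A small TypeScript sketch of the updated signatures, using hypothetical field values and assuming direct imports from the built `dist/utils.js`:

import { isFieldType, validateFieldType } from "./dist/utils.js";

// Hypothetical fields following the Field shape used in this diff.
const tagsField = { key: "tags", type: "array" as const, children: "string" as const };

// The field comes first, the compared type(s) second.
isFieldType(tagsField, ["array", "string"]); // true

// Validation also takes the field object and checks children for array types.
validateFieldType(["a", "b"], tagsField); // true
validateFieldType(123, { key: "title", type: "string" }); // false
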
package/dist/utils.js
CHANGED

@@ -256,6 +256,8 @@ export const detectFieldType = (input, availableTypes) => {
 return "number";
 if (availableTypes.includes("string") && String(input).at(0) === "0")
 return "string";
+if (availableTypes.includes("id"))
+return "id";
 }
 else if (typeof input === "string") {
 if (availableTypes.includes("table") && isValidID(input))
@@ -282,30 +284,28 @@ export const detectFieldType = (input, availableTypes) => {
 return "array";
 return undefined;
 };
-export const isFieldType = (
-if (
-if (Array.isArray(
-
-
-: compareAtType === type))
-return true;
-}
-else if ((Array.isArray(compareAtType) && compareAtType.includes(fieldType)) ||
-compareAtType === fieldType)
+export const isFieldType = (field, compareAtType) => {
+if (Array.isArray(field.type)) {
+if (field.type.some((type) => Array.isArray(compareAtType)
+? compareAtType.includes(type)
+: compareAtType === type))
 return true;
 }
-if (
-
-
-
+else if ((Array.isArray(compareAtType) && compareAtType.includes(field.type)) ||
+compareAtType === field.type)
+return true;
+if (field.children) {
+if (Array.isArray(field.children)) {
+if (!isArrayOfObjects(field.children)) {
+if (field.children.some((type) => Array.isArray(compareAtType)
 ? compareAtType.includes(type)
 : compareAtType === type))
 return true;
 }
 }
 else if ((Array.isArray(compareAtType) &&
-compareAtType.includes(
-compareAtType ===
+compareAtType.includes(field.children)) ||
+compareAtType === field.children)
 return true;
 }
 return false;
@@ -339,32 +339,35 @@ export const filterSchema = (schema, callback) => schema.filter((field) => {
  * Validates if the given value matches the specified field type(s).
  *
  * @param value - The value to be validated.
- * @param
- * @param fieldChildrenType - Optional; the expected type(s) of children elements, used if the field type is an array.
+ * @param field - Field object config.
  * @returns A boolean indicating whether the value matches the specified field type(s).
 */
-export const validateFieldType = (value,
+export const validateFieldType = (value, field) => {
 if (value === null)
 return true;
-if (Array.isArray(
-const detectedFieldType = detectFieldType(value,
+if (Array.isArray(field.type)) {
+const detectedFieldType = detectFieldType(value, field.type);
 if (!detectedFieldType)
 return false;
-
+field.type = detectedFieldType;
 }
-if (
+if (field.type === "array" && field.children)
 return (Array.isArray(value) &&
-
-
-
-
-
-
-
-
-
-
-
+(isArrayOfObjects(field.children) ||
+value.every((v) => {
+let _fieldChildrenType = field.children;
+if (Array.isArray(_fieldChildrenType)) {
+const detectedFieldType = detectFieldType(v, _fieldChildrenType);
+if (!detectedFieldType)
+return false;
+_fieldChildrenType = detectedFieldType;
+}
+return validateFieldType(v, {
+key: "BLABLA",
+type: _fieldChildrenType,
+});
+})));
+switch (field.type) {
 case "string":
 return isString(value);
 case "password":

package/dist/utils.server.d.ts
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
|
-
import {
|
|
1
|
+
import { execFile as execFileSync, exec as execSync } from "node:child_process";
|
|
2
2
|
import { gunzip as gunzipSync, gzip as gzipSync } from "node:zlib";
|
|
3
|
-
import type { ComparisonOperator, Field, FieldType, Schema } from "./index.js";
|
|
4
3
|
import RE2 from "re2";
|
|
4
|
+
import type { ComparisonOperator, Field, FieldType, Schema } from "./index.js";
|
|
5
5
|
export declare const exec: typeof execSync.__promisify__;
|
|
6
6
|
export declare const execFile: typeof execFileSync.__promisify__;
|
|
7
7
|
export declare const gzip: typeof gzipSync.__promisify__;
|
|
@@ -21,30 +21,27 @@ export declare const hashPassword: (password: string) => string;
21   21      * @returns A boolean indicating whether the input password matches the hashed password.
22   22      */
23   23      export declare const comparePassword: (hash: string, password: string) => boolean;
24        -  export declare const encodeID: (id: number | string
25        -  export declare const decodeID: (input: string
26        -  export declare const extractIdsFromSchema: (schema: Schema
     24   +  export declare const encodeID: (id: number | string) => string;
     25   +  export declare const decodeID: (input: string) => number;
     26   +  export declare const extractIdsFromSchema: (schema: Schema) => number[];
27   27      /**
28   28      * Finds the last ID number in a schema, potentially decoding it if encrypted.
29   29      *
30   30      * @param schema - The schema to search, defined as an array of schema objects.
31        -  * @param secretKeyOrSalt - The secret key or salt for decoding an encrypted ID, can be a string, number, or Buffer.
32   31      * @returns The last ID number in the schema, decoded if necessary.
33   32      */
34        -  export declare const findLastIdNumber: (schema: Schema
     33   +  export declare const findLastIdNumber: (schema: Schema) => number;
35   34      /**
36   35      * Adds or updates IDs in a schema, encoding them using a provided secret key or salt.
37   36      *
38   37      * @param schema - The schema to update, defined as an array of schema objects.
39   38      * @param startWithID - An object containing the starting ID for generating new IDs.
40        -  * @param secretKeyOrSalt - The secret key or salt for encoding IDs, can be a string, number, or Buffer.
41        -  * @param encodeIDs - If true, IDs will be encoded, else they will remain as numbers.
42   39      * @returns The updated schema with encoded IDs.
43   40      */
44   41      export declare const addIdToSchema: (schema: Schema, startWithID: {
45   42      value: number;
46        -  }
47        -  export declare const encodeSchemaID: (schema: Schema
     43   +  }) => Field[];
     44   +  export declare const encodeSchemaID: (schema: Schema) => Schema;
48   45      export declare const hashString: (str: string) => string;
49   46      /**
50   47      * Evaluates a comparison between two values based on a specified operator and field types.
@@ -52,7 +49,7 @@ export declare const hashString: (str: string) => string;
52   49      * @param operator - The comparison operator (e.g., '=', '!=', '>', '<', '>=', '<=', '[]', '![]', '*', '!*').
53   50      * @param originalValue - The value to compare, can be a single value or an array of values.
54   51      * @param comparedValue - The value or values to compare against.
55        -  * @param
     52   +  * @param field - Field object config.
56   53      * @param fieldChildrenType - Optional type for child elements in array inputs.
57   54      * @returns boolean - Result of the comparison operation.
58   55      *
@@ -64,7 +61,7 @@ export declare const compare: (operator: ComparisonOperator, originalValue: stri
64   61      *
65   62      * @param originalValue - The original value.
66   63      * @param comparedValue - The value to compare against.
67        -  * @param
     64   +  * @param field - Field object config.
68   65      * @returns boolean - Result of the equality check.
69   66      */
70   67      export declare const isEqual: (originalValue: string | number | boolean | null | (string | number | boolean | null)[], comparedValue: string | number | boolean | null | (string | number | boolean | null)[], fieldType?: FieldType) => boolean;
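Taken together, the declarations above drop the secret-key/salt (and encodeIDs) arguments; the salt is now resolved internally from the package's global configuration. A hedged migration sketch, assuming the built dist/utils.server.js is directly importable and that the global salt has already been initialized by the database instance:

    // Illustrative migration only; the import subpath and the old argument name are
    // inferred from the removed declarations, not copied from inibase docs.
    import { encodeID, decodeID } from "inibase/dist/utils.server.js";

    // 1.1.20 (roughly): encodeID(7, secretKeyOrSalt) / decodeID(token, secretKeyOrSalt)
    // 1.1.22: the salt comes from globalConfig, so only the data is passed.
    const token: string = encodeID(7);
    const id: number = decodeID(token); // -> 7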
package/dist/utils.server.js
CHANGED
@@ -1,9 +1,10 @@
1         -  import {
     1    +  import { execFile as execFileSync, exec as execSync } from "node:child_process";
2    2       import { createCipheriv, createDecipheriv, createHash, randomBytes, scryptSync, } from "node:crypto";
3         -  import { gunzip as gunzipSync, gzip as gzipSync } from "node:zlib";
4    3       import { promisify } from "node:util";
5         -  import {
     4    +  import { gunzip as gunzipSync, gzip as gzipSync } from "node:zlib";
6    5       import RE2 from "re2";
     6    +  import { globalConfig } from "./index.js";
     7    +  import { detectFieldType, isArrayOfObjects, isNumber, isPassword, isValidID, } from "./utils.js";
7    8       export const exec = promisify(execSync);
8    9       export const execFile = promisify(execFileSync);
9    10      export const gzip = promisify(gzipSync);
@@ -38,11 +39,11 @@ export const comparePassword = (hash, password) => {
38   39      // Cache for derived keys if using scrypt
39   40      const derivedKeyCache = new Map();
40   41      // Helper function to create cipher or decipher
41        -  const getKeyAndIv = (
42        -  if (Buffer.isBuffer(
43        -  return { key:
     42   +  const getKeyAndIv = () => {
     43   +  if (Buffer.isBuffer(globalConfig.salt)) {
     44   +  return { key: globalConfig.salt, iv: globalConfig.salt.subarray(0, 16) };
44   45      }
45        -  const cacheKey =
     46   +  const cacheKey = globalConfig.salt.toString();
46   47      let key = derivedKeyCache.get(cacheKey);
47   48      if (!key) {
48   49      key = scryptSync(cacheKey, `${INIBASE_SECRET}`, 32);
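The rewritten getKeyAndIv above no longer takes a salt argument; it reads globalConfig.salt and derives the key material itself. A rough sketch of just the derivation visible in this hunk (the cache and the string-salt IV handling are omitted because they are not shown here):

    // Sketch only; mirrors the hunk above rather than calling into the package.
    import { scryptSync } from "node:crypto";

    // Buffer salt: used directly as the AES-256 key, its first 16 bytes as the IV.
    const bufferSalt = Buffer.alloc(32, 0x01);              // stand-in for globalConfig.salt
    const fromBuffer = { key: bufferSalt, iv: bufferSalt.subarray(0, 16) };

    // String salt: a 32-byte key is derived via scrypt against INIBASE_SECRET
    // (the real helper memoizes it in derivedKeyCache).
    const INIBASE_SECRET = process.env.INIBASE_SECRET ?? "inibase";
    const derivedKey = scryptSync("string-salt", `${INIBASE_SECRET}`, 32);

    console.log(fromBuffer.iv.length, derivedKey.length);   // 16 32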
@@ -53,27 +54,25 @@ const getKeyAndIv = (secretKeyOrSalt) => {
53   54      // Ensure the environment variable is read once
54   55      const INIBASE_SECRET = process.env.INIBASE_SECRET ?? "inibase";
55   56      // Optimized encodeID
56        -  export const encodeID = (id
57        -  const { key, iv } = getKeyAndIv(
     57   +  export const encodeID = (id) => {
     58   +  const { key, iv } = getKeyAndIv();
58   59      const cipher = createCipheriv("aes-256-cbc", key, iv);
59   60      return cipher.update(id.toString(), "utf8", "hex") + cipher.final("hex");
60   61      };
61   62      // Optimized decodeID
62        -  export const decodeID = (input
63        -  const { key, iv } = getKeyAndIv(
     63   +  export const decodeID = (input) => {
     64   +  const { key, iv } = getKeyAndIv();
64   65      const decipher = createDecipheriv("aes-256-cbc", key, iv);
65   66      return Number(decipher.update(input, "hex", "utf8") + decipher.final("utf8"));
66   67      };
67   68      // Function to recursively flatten an array of objects and their nested children
68        -  export const extractIdsFromSchema = (schema
     69   +  export const extractIdsFromSchema = (schema) => {
69   70      const result = [];
70   71      for (const field of schema) {
71   72      if (field.id)
72        -  result.push(typeof field.id === "number"
73        -  ? field.id
74        -  : decodeID(field.id, secretKeyOrSalt));
     73   +  result.push(typeof field.id === "number" ? field.id : decodeID(field.id));
75   74      if (field.children && isArrayOfObjects(field.children))
76        -  result.push(...extractIdsFromSchema(field.children
     75   +  result.push(...extractIdsFromSchema(field.children));
77   76      }
78   77      return result;
79   78      };
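A small sketch of the flattening implemented above, using plain numeric ids so nothing needs to be decoded (and therefore no salt is required); the schema literal is a minimal guess at the Field shape and the import subpath is an assumption:

    // Illustrative only; a real Schema comes from inibase itself.
    import { extractIdsFromSchema, findLastIdNumber } from "inibase/dist/utils.server.js";

    const schema: any = [
      { key: "title", type: "string", id: 1 },
      {
        key: "author",
        type: "object",
        id: 2,
        // nested array-of-objects children are walked recursively
        children: [{ key: "name", type: "string", id: 3 }],
      },
    ];

    console.log(extractIdsFromSchema(schema)); // [1, 2, 3]
    console.log(findLastIdNumber(schema));     // 3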
@@ -81,35 +80,24 @@ export const extractIdsFromSchema = (schema, secretKeyOrSalt) => {
81   80      * Finds the last ID number in a schema, potentially decoding it if encrypted.
82   81      *
83   82      * @param schema - The schema to search, defined as an array of schema objects.
84        -  * @param secretKeyOrSalt - The secret key or salt for decoding an encrypted ID, can be a string, number, or Buffer.
85   83      * @returns The last ID number in the schema, decoded if necessary.
86   84      */
87        -  export const findLastIdNumber = (schema
     85   +  export const findLastIdNumber = (schema) => Math.max(...extractIdsFromSchema(schema));
88   86      /**
89   87      * Adds or updates IDs in a schema, encoding them using a provided secret key or salt.
90   88      *
91   89      * @param schema - The schema to update, defined as an array of schema objects.
92   90      * @param startWithID - An object containing the starting ID for generating new IDs.
93        -  * @param secretKeyOrSalt - The secret key or salt for encoding IDs, can be a string, number, or Buffer.
94        -  * @param encodeIDs - If true, IDs will be encoded, else they will remain as numbers.
95   91      * @returns The updated schema with encoded IDs.
96   92      */
97        -  export const addIdToSchema = (schema, startWithID
     93   +  export const addIdToSchema = (schema, startWithID) => {
98   94      function _addIdToField(field) {
99   95      if (!field.id) {
100  96      startWithID.value++;
101       -  field.id =
102       -  ? encodeID(startWithID.value, secretKeyOrSalt)
103       -  : startWithID.value;
104       -  }
105       -  else {
106       -  if (isValidID(field.id)) {
107       -  if (!encodeIDs)
108       -  field.id = decodeID(field.id, secretKeyOrSalt);
109       -  }
110       -  else if (encodeIDs)
111       -  field.id = encodeID(field.id, secretKeyOrSalt);
     97   +  field.id = encodeID(startWithID.value);
112  98      }
     99   +  else
     100  +  field.id = isValidID(field.id) ? decodeID(field.id) : encodeID(field.id);
113  101     if ((field.type === "array" || field.type === "object") &&
114  102     isArrayOfObjects(field.children))
115  103     field.children = _addIdToSchema(field.children);
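A sketch of driving the simplified addIdToSchema above: startWithID is a mutable counter object, so every field without an id bumps it and receives an encoded id. This assumes the global salt has already been set up (normally when the database instance is constructed), and it uses a minimal, made-up schema with an assumed import subpath:

    import { addIdToSchema } from "inibase/dist/utils.server.js";

    const schema: any = [
      { key: "title", type: "string" },
      { key: "tags", type: "array", children: "string" },
    ];

    // Numbering continues from the counter's current value: 0 -> ids 1 and 2,
    // each stored in its AES-encoded string form.
    const withIds = addIdToSchema(schema, { value: 0 });
    console.log(withIds.map((f: any) => typeof f.id)); // ["string", "string"]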
@@ -118,13 +106,13 @@ export const addIdToSchema = (schema, startWithID, secretKeyOrSalt, encodeIDs) =
118  106     const _addIdToSchema = (schema) => schema.map(_addIdToField);
119  107     return _addIdToSchema(schema);
120  108     };
121       -  export const encodeSchemaID = (schema
     109  +  export const encodeSchemaID = (schema) => schema.map((field) => ({
122  110     ...field,
123       -  id: isNumber(field.id) ? encodeID(field.id
     111  +  id: isNumber(field.id) ? encodeID(field.id) : field.id,
124  112     ...(field.children
125  113     ? isArrayOfObjects(field.children)
126  114     ? {
127       -  children: encodeSchemaID(field.children
     115  +  children: encodeSchemaID(field.children),
128  116     }
129  117     : { children: field.children }
130  118     : {}),
@@ -136,7 +124,7 @@ export const hashString = (str) => createHash("sha256").update(str).digest("hex"
136  124     * @param operator - The comparison operator (e.g., '=', '!=', '>', '<', '>=', '<=', '[]', '![]', '*', '!*').
137  125     * @param originalValue - The value to compare, can be a single value or an array of values.
138  126     * @param comparedValue - The value or values to compare against.
139       -  * @param
     127  +  * @param field - Field object config.
140  128     * @param fieldChildrenType - Optional type for child elements in array inputs.
141  129     * @returns boolean - Result of the comparison operation.
142  130     *
@@ -199,7 +187,7 @@ const compareNonNullValues = (originalValue, comparedValue, comparator) => {
199  187     *
200  188     * @param originalValue - The original value.
201  189     * @param comparedValue - The value to compare against.
202       -  * @param
     190  +  * @param field - Field object config.
203  191     * @returns boolean - Result of the equality check.
204  192     */
205  193     export const isEqual = (originalValue, comparedValue, fieldType) => {
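Finally, a tentative sketch of calling isEqual with the optional fieldType hint from the declaration above; the expected results and the "number" field type are assumptions about its behaviour, not documented guarantees, and the import subpath is likewise assumed:

    import { isEqual } from "inibase/dist/utils.server.js";

    console.log(isEqual("inibase", "inibase"));     // identical scalars, expected to be true
    console.log(isEqual([1, 2], [1, 2], "number")); // array comparison with a type hint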