@dbml/core 6.0.0 → 6.2.0-alpha.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (33)
  1. package/README.md +2 -2
  2. package/lib/export/DbmlExporter.js +53 -1
  3. package/lib/export/MysqlExporter.js +47 -1
  4. package/lib/export/OracleExporter.js +52 -3
  5. package/lib/export/PostgresExporter.js +53 -1
  6. package/lib/export/SqlServerExporter.js +48 -1
  7. package/lib/index.js +103 -0
  8. package/lib/model_structure/field.js +2 -2
  9. package/lib/parse/ANTLR/ASTGeneration/mssql/MssqlASTGen.js +21 -7
  10. package/lib/parse/ANTLR/ASTGeneration/mysql/MySQLASTGen.js +5 -2
  11. package/lib/parse/ANTLR/ASTGeneration/oraclesql/OracleSQLASTGen.js +85 -4
  12. package/lib/parse/ANTLR/ASTGeneration/oraclesql/README.md +5 -5
  13. package/lib/parse/ANTLR/ASTGeneration/postgres/PostgresASTGen.js +10 -0
  14. package/lib/parse/ANTLR/ASTGeneration/snowflake/SnowflakeASTGen.js +90 -5
  15. package/lib/parse/ANTLR/parsers/mysql/MySqlLexerBase.js +1 -1
  16. package/package.json +3 -3
  17. package/types/export/index.d.ts +1 -0
  18. package/types/index.d.ts +68 -2
  19. package/types/model_structure/check.d.ts +12 -10
  20. package/types/model_structure/database.d.ts +51 -41
  21. package/types/model_structure/endpoint.d.ts +24 -11
  22. package/types/model_structure/enum.d.ts +13 -10
  23. package/types/model_structure/enumValue.d.ts +11 -8
  24. package/types/model_structure/field.d.ts +23 -17
  25. package/types/model_structure/index.d.ts +91 -0
  26. package/types/model_structure/indexColumn.d.ts +11 -8
  27. package/types/model_structure/indexes.d.ts +16 -12
  28. package/types/model_structure/ref.d.ts +15 -12
  29. package/types/model_structure/schema.d.ts +17 -14
  30. package/types/model_structure/stickyNote.d.ts +15 -10
  31. package/types/model_structure/table.d.ts +17 -15
  32. package/types/model_structure/tableGroup.d.ts +13 -10
  33. package/types/model_structure/tablePartial.d.ts +13 -10
package/lib/parse/ANTLR/ASTGeneration/oraclesql/OracleSQLASTGen.js CHANGED
@@ -989,13 +989,15 @@ var OracleSqlASTGen = exports["default"] = /*#__PURE__*/function (_OracleSqlPars
  }

  // insert_statement
- // : INSERT (single_table_insert | <other rules>)
+ // : INSERT (single_table_insert | multi_table_insert)
  // ;
  }, {
  key: "visitInsert_statement",
  value: function visitInsert_statement(ctx) {
  if (ctx.single_table_insert()) {
  ctx.single_table_insert().accept(this);
+ } else if (ctx.multi_table_insert()) {
+ ctx.multi_table_insert().accept(this);
  }
  }

@@ -1022,18 +1024,97 @@ var OracleSqlASTGen = exports["default"] = /*#__PURE__*/function (_OracleSqlPars
  }
  }

+ // multi_table_insert
+ // : (ALL multi_table_element+ | conditional_insert_clause) select_statement
+ // ;
+ }, {
+ key: "visitMulti_table_insert",
+ value: function visitMulti_table_insert(ctx) {
+ var _this14 = this;
+ if (!ctx.multi_table_element()) {
+ // conditional_insert_clause is not supported yet
+ return;
+ }
+
+ // Collect all insert elements from INSERT ALL statement
+ var elements = ctx.multi_table_element().map(function (element) {
+ return element.accept(_this14);
+ }).filter(Boolean);
+
+ // Group elements by table, schema, and columns to combine multiple rows into single records
+ var recordsMap = new Map();
+ elements.forEach(function (element) {
+ var tableName = element.tableName,
+ schemaName = element.schemaName,
+ columns = element.columns,
+ values = element.values;
+ var key = "".concat(schemaName || '', ".").concat(tableName, ".").concat(columns.join(','));
+ if (recordsMap.has(key)) {
+ var _recordsMap$get$value;
+ // Same table and columns - append values to existing record
+ (_recordsMap$get$value = recordsMap.get(key).values).push.apply(_recordsMap$get$value, _toConsumableArray(values));
+ } else {
+ // New combination - create new record entry
+ recordsMap.set(key, {
+ tableName: tableName,
+ schemaName: schemaName,
+ columns: columns,
+ values: values
+ });
+ }
+ });
+
+ // Create TableRecord objects for each unique table/column combination
+ recordsMap.forEach(function (recordData) {
+ var record = new _AST.TableRecord({
+ schemaName: recordData.schemaName,
+ tableName: recordData.tableName,
+ columns: recordData.columns,
+ values: recordData.values
+ });
+ _this14.data.records.push(record);
+ });
+ }
+
+ // multi_table_element
+ // : insert_into_clause values_clause? error_logging_clause?
+ // ;
+ }, {
+ key: "visitMulti_table_element",
+ value: function visitMulti_table_element(ctx) {
+ var _ctx$values_clause;
+ var intoClause = ctx.insert_into_clause().accept(this);
+ var valuesClause = (_ctx$values_clause = ctx.values_clause()) === null || _ctx$values_clause === void 0 ? void 0 : _ctx$values_clause.accept(this);
+ if (intoClause && valuesClause) {
+ var tableName = intoClause.tableName,
+ schemaName = intoClause.schemaName,
+ columns = intoClause.columns;
+ var values = valuesClause.values;
+ return {
+ tableName: tableName,
+ schemaName: schemaName,
+ columns: columns,
+ values: values
+ };
+ }
+ return null;
+ }
+
  // insert_into_clause
  // : INTO general_table_ref paren_column_list?
  // ;
  }, {
  key: "visitInsert_into_clause",
  value: function visitInsert_into_clause(ctx) {
+ var _findTable;
  var names = ctx.general_table_ref().accept(this);
  var tableName = (0, _lodash.last)(names);
  var schemaName = names.length > 1 ? names[names.length - 2] : undefined;
  var columns = ctx.paren_column_list() ? ctx.paren_column_list().accept(this).map(function (c) {
  return (0, _lodash.last)(c);
- }) : [];
+ }) : (_findTable = findTable(this.data.tables, schemaName, tableName)) === null || _findTable === void 0 ? void 0 : _findTable.fields.map(function (field) {
+ return field.name;
+ });
  return {
  tableName: tableName,
  schemaName: schemaName,
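The grouping step added in visitMulti_table_insert keys each INTO clause by schema, table, and column list, so repeated INTO clauses that target the same table collapse into one record carrying all of their rows. A minimal standalone sketch of that bookkeeping, under assumed names (`InsertElement` and `groupInsertElements` are illustrative, not part of the package):

```ts
// Shape assumed to mirror what visitMulti_table_element returns.
interface InsertElement {
  schemaName?: string;
  tableName: string;
  columns: string[];
  values: unknown[][];
}

// Group INSERT ALL elements the way the visitor does: one entry per unique
// "<schema>.<table>.<columns>" key, appending row values for repeated keys.
function groupInsertElements(elements: InsertElement[]): InsertElement[] {
  const recordsMap = new Map<string, InsertElement>();
  for (const el of elements) {
    const key = `${el.schemaName ?? ''}.${el.tableName}.${el.columns.join(',')}`;
    const existing = recordsMap.get(key);
    if (existing) {
      existing.values.push(...el.values); // same table + columns: append rows
    } else {
      recordsMap.set(key, { ...el, values: [...el.values] });
    }
  }
  return [...recordsMap.values()];
}
```

Two INTO clauses that both hit `users (id, name)` therefore end up as a single element with two rows, which is what becomes one TableRecord in `data.records`.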
@@ -1078,9 +1159,9 @@ var OracleSqlASTGen = exports["default"] = /*#__PURE__*/function (_OracleSqlPars
  }, {
  key: "visitExpressions_",
  value: function visitExpressions_(ctx) {
- var _this14 = this;
+ var _this15 = this;
  return ctx.expression().map(function (e) {
- return e.accept(_this14);
+ return e.accept(_this15);
  });
  }
  }, {
package/lib/parse/ANTLR/ASTGeneration/oraclesql/README.md CHANGED
@@ -177,13 +177,13 @@ This module provides SQL parsing capabilities for Oracle databases, enabling con
  | Feature | Status | Notes |
  |---------|---------|-------|
  | Basic `INSERT` ... VALUES | ✓ | `INSERT INTO t (col) VALUES (1)` |
- | Multi-row `INSERT` | ✗ | Oracle uses `INSERT ALL` syntax |
+ | Multi-row `INSERT` | ✓ | Supported via `INSERT ALL` syntax |
  | `INSERT` ... SELECT | ✗ | Subquery as data source |
  | WITH clause (CTE) | ✗ | CTE before `INSERT` |
  | Target table alias | ✗ | `INSERT INTO t alias ...` |
  | `INSERT` ... RETURNING | ◐ | Returns inserted rows - clause is ignored |
- | `INSERT ALL` (multi-table insert) | ✗ | Oracle-specific multi-table insert |
- | Conditional `INSERT` (WHEN/FIRST/ALL) | ✗ | Oracle syntax for conditional inserts |
+ | `INSERT ALL` (multi-table insert) | ✓ | Oracle-specific multi-table insert - fully supported |
+ | Conditional `INSERT` (WHEN/FIRST/ALL) | ✗ | Conditional inserts with WHEN clauses not yet supported |
  | `INSERT` OVERWRITE | — | Snowflake/Hive syntax - not valid in Oracle |

  ---
@@ -243,7 +243,7 @@ This module provides SQL parsing capabilities for Oracle databases, enabling con
  ## Known Limitations

  - **`ON UPDATE` for `FOREIGN KEY`**: Oracle does not support `ON UPDATE` actions for foreign keys; only `ON DELETE` is available
- - **Multi-row `INSERT`**: Oracle uses `INSERT ALL` syntax which is not currently supported
+ - **Conditional `INSERT ALL`**: Basic `INSERT ALL` is fully supported, but conditional inserts with WHEN/FIRST clauses are not yet supported
  - **DDL modification statements**: `DROP TABLE`, `DROP INDEX`, `ALTER INDEX` not supported
  - **`INSERT` ... SELECT**: Subqueries in `INSERT` statements not supported
  - **`CREATE VIEW`**: View definitions are not parsed
@@ -257,5 +257,5 @@ This module provides SQL parsing capabilities for Oracle databases, enabling con
  3. **Function-based Indexes**: Oracle's function-based indexes (e.g., `CREATE INDEX ON t (UPPER(col))`) are fully supported
  4. **`ON DELETE` Only**: Oracle foreign keys only support `ON DELETE` actions (`CASCADE`, `SET NULL`, `NO ACTION`). There is no `ON UPDATE` support
  5. **Comments**: Use `COMMENT ON TABLE/COLUMN` statements. These are separate DDL statements, not inline with `CREATE TABLE`
- 6. **No Multi-row `INSERT`**: Oracle uses `INSERT ALL ... SELECT` syntax for multi-row inserts, which differs from other databases
+ 6. **`INSERT ALL` Support**: Oracle's `INSERT ALL ... SELECT * FROM dual` syntax for multi-row and multi-table inserts is fully supported. Rows with the same table and columns are automatically grouped together. Conditional inserts with WHEN clauses are not yet supported
  7. **`VARCHAR2`**: Oracle uses `VARCHAR2` (not `VARCHAR`) as the standard variable-length string type
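For reference, the statement shape the updated README notes describe, and which the new Oracle visitor turns into records, looks like the following. The call is a sketch only: the static Parser.parse entry point and the 'oracle' format id are assumptions for illustration, not confirmed by this diff.

```ts
import { Parser } from '@dbml/core';

const oracleSql = `
  CREATE TABLE users (id NUMBER, name VARCHAR2(50));
  CREATE TABLE audit_log (msg VARCHAR2(200));
  -- Multi-row insert in Oracle: one INTO clause per row, closed by SELECT * FROM dual.
  INSERT ALL
    INTO users (id, name) VALUES (1, 'An')
    INTO users (id, name) VALUES (2, 'Binh')
    INTO audit_log (msg) VALUES ('seeded users')
  SELECT * FROM dual;
`;

// Format id 'oracle' is an assumption; check the package's supported format list.
const database = Parser.parse(oracleSql, 'oracle');
// The two users rows are grouped into one record; audit_log gets its own.
```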
package/lib/parse/ANTLR/ASTGeneration/postgres/PostgresASTGen.js CHANGED
@@ -1230,6 +1230,16 @@ var PostgresASTGen = exports["default"] = /*#__PURE__*/function (_PostgreSQLPars
  var _ctx$insert_rest$acce = ctx.insert_rest().accept(this),
  columns = _ctx$insert_rest$acce.columns,
  values = _ctx$insert_rest$acce.values;
+
+ // When no columns are specified, lookup table and use all its columns
+ if (columns.length === 0) {
+ var table = findTable(this.data.tables, schemaName, tableName);
+ if (table && table.fields) {
+ columns = table.fields.map(function (field) {
+ return field.name;
+ });
+ }
+ }
  var record = new _AST.TableRecord({
  schemaName: schemaName,
  tableName: tableName,
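The fallback added here targets INSERT statements that omit the column list: the generator now fills the columns in from the table definition it has already parsed. A hedged sketch of the kind of input this covers (SQL shown as a plain string; not taken from the package's tests):

```ts
// Without an explicit column list, the record's columns now come from the
// CREATE TABLE definition instead of being left empty.
const postgresSql = `
  CREATE TABLE users (id int, name varchar(50));
  INSERT INTO users VALUES (1, 'An'), (2, 'Binh');
`;
```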
package/lib/parse/ANTLR/ASTGeneration/snowflake/SnowflakeASTGen.js CHANGED
@@ -33,6 +33,7 @@ function _isNativeReflectConstruct() { try { var t = !Boolean.prototype.valueOf.
  function _getPrototypeOf(t) { return _getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf.bind() : function (t) { return t.__proto__ || Object.getPrototypeOf(t); }, _getPrototypeOf(t); }
  function _inherits(t, e) { if ("function" != typeof e && null !== e) throw new TypeError("Super expression must either be null or a function"); t.prototype = Object.create(e && e.prototype, { constructor: { value: t, writable: !0, configurable: !0 } }), Object.defineProperty(t, "prototype", { writable: !1 }), e && _setPrototypeOf(t, e); }
  function _setPrototypeOf(t, e) { return _setPrototypeOf = Object.setPrototypeOf ? Object.setPrototypeOf.bind() : function (t, e) { return t.__proto__ = e, t; }, _setPrototypeOf(t, e); }
+ var DEFAULT_SCHEMA = 'public';
  var sanitizeComment = function sanitizeComment(stringContext) {
  return (0, _helpers.getOriginalText)(stringContext).replace(/''/g, "'").slice(1, -1);
  };
@@ -48,14 +49,25 @@ var SnowflakeASTGen = exports["default"] = /*#__PURE__*/function (_SnowflakePars
  enums: [],
  tableGroups: [],
  aliases: [],
- project: {}
+ project: {},
+ records: []
  };
  return _this;
  }
-
- // batch? EOF
  _inherits(SnowflakeASTGen, _SnowflakeParserVisit);
  return _createClass(SnowflakeASTGen, [{
+ key: "findTable",
+ value: function findTable(schemaName, tableName) {
+ var realSchemaName = schemaName || DEFAULT_SCHEMA;
+ var table = this.data.tables.find(function (t) {
+ var targetSchemaName = t.schemaName || DEFAULT_SCHEMA;
+ return targetSchemaName === realSchemaName && t.name === tableName;
+ });
+ return table;
+ }
+
+ // batch? EOF
+ }, {
  key: "visitSnowflake_file",
  value: function visitSnowflake_file(ctx) {
  var _ctx$batch;
@@ -79,6 +91,8 @@ var SnowflakeASTGen = exports["default"] = /*#__PURE__*/function (_SnowflakePars
  value: function visitSql_command(ctx) {
  if (ctx.ddl_command()) {
  ctx.ddl_command().accept(this);
+ } else if (ctx.dml_command()) {
+ ctx.dml_command().accept(this);
  }
  }

@@ -93,6 +107,22 @@ var SnowflakeASTGen = exports["default"] = /*#__PURE__*/function (_SnowflakePars
  }
  }

+ // dml_command
+ // : query_statement
+ // | insert_statement
+ // | insert_multi_table_statement
+ // | update_statement
+ // | delete_statement
+ // | merge_statement
+ // ;
+ }, {
+ key: "visitDml_command",
+ value: function visitDml_command(ctx) {
+ if (ctx.insert_statement()) {
+ ctx.insert_statement().accept(this);
+ }
+ }
+
  // check SnowflakeParser.g4 line 1442
  }, {
  key: "visitCreate_command",
@@ -171,7 +201,7 @@ var SnowflakeASTGen = exports["default"] = /*#__PURE__*/function (_SnowflakePars
  var field = fieldData.field,
  inlineRefs = fieldData.inlineRefs;
  if (((_field$type$type_name = field.type.type_name) === null || _field$type$type_name === void 0 ? void 0 : _field$type$type_name.toLowerCase()) === 'enum') {
- var _values = field.type.args.map(function (arg) {
+ var values = field.type.args.map(function (arg) {
  var newValue = arg.replace(/'|"|`/g, '').trim();
  return {
  name: newValue
@@ -180,7 +210,7 @@ var SnowflakeASTGen = exports["default"] = /*#__PURE__*/function (_SnowflakePars
  var _enum = new _AST.Enum({
  name: "".concat(tableName, "_").concat(field.name, "_enum"),
  schemaName: schemaName,
- values: _values
+ values: values
  });
  field.type.type_name = _enum.name;
  field.type.schemaName = _enum.schemaName;
@@ -743,5 +773,60 @@ var SnowflakeASTGen = exports["default"] = /*#__PURE__*/function (_SnowflakePars
  }
  return null;
  }
+
+ // insert_statement
+ // : INSERT OVERWRITE? INTO object_name column_list_in_parentheses? (
+ // values_builder
+ // | query_statement
+ // )
+ // ;
+ }, {
+ key: "visitInsert_statement",
+ value: function visitInsert_statement(ctx) {
+ var _this$findTable;
+ var _ctx$object_name$acce7 = ctx.object_name().accept(this),
+ _ctx$object_name$acce8 = _slicedToArray(_ctx$object_name$acce7, 3),
+ databaseName = _ctx$object_name$acce8[0],
+ schemaName = _ctx$object_name$acce8[1],
+ tableName = _ctx$object_name$acce8[2];
+ var columns = ctx.column_list_in_parentheses() ? ctx.column_list_in_parentheses().accept(this) : ((_this$findTable = this.findTable(schemaName, tableName)) === null || _this$findTable === void 0 ? void 0 : _this$findTable.fields.map(function (field) {
+ return field.name;
+ })) || [];
+
+ // Only handle values_builder, not query_statement
+ var values = ctx.values_builder() ? ctx.values_builder().accept(this) : [];
+ var record = new _AST.TableRecord({
+ schemaName: schemaName,
+ tableName: tableName,
+ columns: columns,
+ values: values
+ });
+ this.data.records.push(record);
+ }
+
+ // values_builder
+ // : VALUES '(' expr_list ')' (COMMA '(' expr_list ')')?
+ // ;
+ }, {
+ key: "visitValues_builder",
+ value: function visitValues_builder(ctx) {
+ var _this0 = this;
+ return ctx.expr_list().map(function (exprList) {
+ var rowValues = exprList.accept(_this0);
+ return (0, _lodash.flattenDepth)(rowValues, 1);
+ });
+ }
+
+ // expr_list
+ // : expr (COMMA expr)*
+ // ;
+ }, {
+ key: "visitExpr_list",
+ value: function visitExpr_list(ctx) {
+ var _this1 = this;
+ return ctx.expr().map(function (expr) {
+ return expr.accept(_this1);
+ });
+ }
  }]);
  }(_SnowflakeParserVisitor["default"]);
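Taken together, the Snowflake changes route dml_command into visitInsert_statement, build records from VALUES lists, and fall back to the table's own columns (via the new findTable helper, with 'public' as the default schema) when no column list is written. A sketch of an input this is meant to cover, under those assumptions:

```ts
// Column list omitted: columns are resolved from the earlier CREATE TABLE,
// and the unqualified table name falls back to the 'public' schema.
// INSERT ... SELECT remains unhandled; only VALUES produces records.
const snowflakeSql = `
  CREATE TABLE users (id INT, name STRING);
  INSERT INTO users VALUES (1, 'An'), (2, 'Binh');
`;
```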
package/lib/parse/ANTLR/parsers/mysql/MySqlLexerBase.js CHANGED
@@ -45,7 +45,7 @@ var MySqlLexerBase = exports["default"] = /*#__PURE__*/function (_antlr4$Lexer)
  key: "nextToken",
  value: function nextToken() {
  // See the comment above HIGH_PRIORITY_MODE in the Lexer grammar G4 file
- this.mode(MySqlLexerBase.DEFAULT_MODE); // always start in DEFAULT_MODE, which will automatically switch to high priority mode
+ this.setMode(MySqlLexerBase.DEFAULT_MODE); // always start in DEFAULT_MODE, which will automatically switch to high priority mode
  return _superPropGet(MySqlLexerBase, "nextToken", this, 3)([]);
  }
  }]);
package/package.json CHANGED
@@ -1,7 +1,7 @@
  {
  "$schema": "https://json.schemastore.org/package",
  "name": "@dbml/core",
- "version": "6.0.0",
+ "version": "6.2.0-alpha.0",
  "description": "> TODO: description",
  "author": "Holistics <dev@holistics.io>",
  "license": "Apache-2.0",
@@ -36,7 +36,7 @@
  "lint:fix": "eslint --fix ."
  },
  "dependencies": {
- "@dbml/parse": "^6.0.0",
+ "@dbml/parse": "^6.2.0-alpha.0",
  "antlr4": "^4.13.1",
  "lodash": "^4.17.15",
  "parsimmon": "^1.13.0",
@@ -63,7 +63,7 @@
  "typescript": "^5.9.3",
  "typescript-eslint": "^8.46.3"
  },
- "gitHead": "75a218687814d8b1c538b26908745e0d0ee8b35a",
+ "gitHead": "00865dc46e1cbab39233ad010c6b5e120131476c",
  "engines": {
  "node": ">=16"
  }
package/types/export/index.d.ts CHANGED
@@ -1,4 +1,5 @@
  import { ExportFormatOption } from './ModelExporter';
+ import { RecordValueType } from '../model_structure/database';

  declare function _export(str: string, format: ExportFormatOption): string;
  declare const _default: {
package/types/index.d.ts CHANGED
@@ -2,6 +2,72 @@ import ModelExporter from './export/ModelExporter';
  import Parser from './parse/Parser';
  import importer from './import';
  import exporter from './export';
- import { renameTable } from './transform';
- export { renameTable, importer, exporter, ModelExporter, Parser };
+ import {
+ renameTable,
+ } from './transform';
+ export {
+ renameTable,
+ importer,
+ exporter,
+ ModelExporter,
+ Parser,
+ };
  export { CompilerDiagnostic, CompilerError as CompilerDiagnostics, EditorPosition, ErrorCode, WarningLevel, } from './parse/error';
+
+ // Export normalized types
+ export type {
+ NormalizedDatabase,
+ NormalizedDatabaseIdMap,
+ NormalizedModel,
+ NormalizedSchema,
+ NormalizedSchemaIdMap,
+ NormalizedTable,
+ NormalizedTableIdMap,
+ NormalizedField,
+ NormalizedFieldIdMap,
+ NormalizedIndex,
+ NormalizedIndexIdMap,
+ NormalizedIndexColumn,
+ NormalizedIndexColumnIdMap,
+ NormalizedEnum,
+ NormalizedEnumIdMap,
+ NormalizedEnumValue,
+ NormalizedEnumValueIdMap,
+ NormalizedRef,
+ NormalizedRefIdMap,
+ NormalizedEndpoint,
+ NormalizedEndpointIdMap,
+ NormalizedTableGroup,
+ NormalizedTableGroupIdMap,
+ NormalizedNote,
+ NormalizedNoteIdMap,
+ NormalizedCheck,
+ NormalizedCheckIdMap,
+ NormalizedTablePartial,
+ NormalizedTablePartialIdMap,
+ Project,
+ RawDatabase,
+ TableRecord,
+ NormalizedRecords,
+ RawSchema,
+ } from './model_structure';
+ export { RecordValueType, RecordValue } from './model_structure/database';
+ export {
+ SqlDialect,
+ isIntegerType,
+ isFloatType,
+ isNumericType,
+ isBooleanType,
+ isStringType,
+ isBinaryType,
+ isDateTimeType,
+ isSerialType,
+ tryExtractBoolean,
+ tryExtractNumeric,
+ tryExtractInteger,
+ tryExtractString,
+ tryExtractDateTime,
+ tryExtractEnum,
+ addDoubleQuoteIfNeeded,
+ formatRecordValue,
+ } from '@dbml/parse';
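A consumer-side sketch of the new root exports: RecordValue and RecordValueType come straight from the typings added in this release, so typing record values no longer requires reaching into internal module paths.

```ts
import type { RecordValue, RecordValueType } from '@dbml/core';

// RecordValue pairs a raw value with its inferred type tag.
const sample: RecordValue = { value: 42, type: 'integer' };

// Illustrative helper over the exported union of known type tags.
const isTemporal = (t: RecordValueType): boolean =>
  t === 'date' || t === 'time' || t === 'datetime';
```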
package/types/model_structure/check.d.ts CHANGED
@@ -1,4 +1,4 @@
- import { NormalizedDatabase } from './database';
+ import { NormalizedModel } from './database';
  import Element, { Token } from './element';
  import Field from './field';
  import Table from './table';
@@ -35,18 +35,20 @@ declare class Check extends Element {
  name: string;
  expression: string;
  };
- normalize(model: NormalizedDatabase): void;
+ normalize(model: NormalizedModel): void;
  }

  export interface NormalizedCheck {
- [_id: number]: {
- id: number;
- name: string;
- expression: string;
- tableId: number;
- columnId: number | null;
- injectedPartialId: number | null;
- };
+ id: number;
+ name: string;
+ expression: string;
+ tableId: number;
+ columnId: number | null;
+ injectedPartialId: number | null;
+ }
+
+ export interface NormalizedCheckIdMap {
+ [_id: number]: NormalizedCheck;
  }

  export default Check;
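This file introduces the pattern the rest of the model_structure typings below follow: the entity interface (NormalizedCheck) is split out from its id-keyed map (NormalizedCheckIdMap). A type-level sketch of how a consumer reads it:

```ts
import type { NormalizedCheck, NormalizedCheckIdMap } from '@dbml/core';

// NormalizedCheck used to be the id-keyed map itself; the map is now a
// separate interface, so single entries can be typed directly.
function checkExpressions(checks: NormalizedCheckIdMap): string[] {
  return Object.values(checks).map((check: NormalizedCheck) => check.expression);
}
```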
package/types/model_structure/database.d.ts CHANGED
@@ -1,31 +1,38 @@
- import Schema, { NormalizedSchema, RawSchema } from './schema';
- import Ref, { NormalizedRef } from './ref';
- import Enum, { NormalizedEnum } from './enum';
- import TableGroup, { NormalizedTableGroup } from './tableGroup';
- import Table, { NormalizedTable } from './table';
- import StickyNote, { NormalizedStickyNote } from './stickyNote';
+ import Schema, { NormalizedSchemaIdMap, RawSchema } from './schema';
+ import Ref, { NormalizedRefIdMap } from './ref';
+ import Enum, { NormalizedEnumIdMap } from './enum';
+ import TableGroup, { NormalizedTableGroupIdMap } from './tableGroup';
+ import Table, { NormalizedTableIdMap } from './table';
+ import StickyNote, { NormalizedNoteIdMap } from './stickyNote';
  import Element, { RawNote, Token } from './element';
  import DbState from './dbState';
- import { NormalizedEndpoint } from './endpoint';
- import { NormalizedEnumValue } from './enumValue';
- import { NormalizedField } from './field';
- import { NormalizedIndexColumn } from './indexColumn';
- import { NormalizedIndex } from './indexes';
- import { NormalizedCheck } from './check';
- import TablePartial, { NormalizedTablePartial } from './tablePartial';
+ import { NormalizedEndpointIdMap } from './endpoint';
+ import { NormalizedEnumValueIdMap } from './enumValue';
+ import { NormalizedFieldIdMap } from './field';
+ import { NormalizedIndexColumnIdMap } from './indexColumn';
+ import { NormalizedIndexIdMap } from './indexes';
+ import { NormalizedCheckIdMap } from './check';
+ import TablePartial, { NormalizedTablePartialIdMap } from './tablePartial';
  export interface Project {
  note: RawNote;
  database_type: string;
  name: string;
  }

- interface RawTableRecord {
+ export type RecordValueType = 'string' | 'bool' | 'integer' | 'real' | 'date' | 'time' | 'datetime' | string;
+
+ export interface RecordValue {
+ value: any;
+ type: RecordValueType;
+ }
+
+ export interface RawTableRecord {
  schemaName: string | undefined;
  tableName: string;
  columns: string[];
  values: {
  value: any;
- type: string;
+ type: RecordValueType;
  }[][];
  }

@@ -276,33 +283,36 @@ declare class Database extends Element {
  schemaIds: number[];
  noteIds: number[];
  };
- normalize(): NormalizedDatabase;
+ normalize(): NormalizedModel;
  }
  export interface NormalizedDatabase {
- database: {
- [_id: number]: {
- id: number;
- hasDefaultSchema: boolean;
- note: string;
- databaseType: string;
- name: string;
- schemaIds: number[];
- noteIds: number[];
- };
- };
- schemas: NormalizedSchema;
- notes: NormalizedStickyNote;
- refs: NormalizedRef;
- enums: NormalizedEnum;
- tableGroups: NormalizedTableGroup;
- tables: NormalizedTable;
- endpoints: NormalizedEndpoint;
- enumValues: NormalizedEnumValue;
- indexes: NormalizedIndex;
- indexColumns: NormalizedIndexColumn;
- checks: NormalizedCheck;
- fields: NormalizedField;
- records: NormalizedRecords;
- tablePartials: NormalizedTablePartial;
+ id: number;
+ hasDefaultSchema: boolean;
+ note: string | null;
+ databaseType: string;
+ name: string;
+ schemaIds: number[];
+ noteIds: number[];
+ }
+
+ export interface NormalizedDatabaseIdMap {
+ [_id: number]: NormalizedDatabase;
+ }
+
+ export interface NormalizedModel {
+ database: NormalizedDatabaseIdMap;
+ schemas: NormalizedSchemaIdMap;
+ endpoints: NormalizedEndpointIdMap;
+ refs: NormalizedRefIdMap;
+ fields: NormalizedFieldIdMap;
+ tables: NormalizedTableIdMap;
+ tableGroups: NormalizedTableGroupIdMap;
+ enums: NormalizedEnumIdMap;
+ enumValues: NormalizedEnumValueIdMap;
+ indexes: NormalizedIndexIdMap;
+ indexColumns: NormalizedIndexColumnIdMap;
+ notes: NormalizedNoteIdMap;
+ checks: NormalizedCheckIdMap;
+ tablePartials: NormalizedTablePartialIdMap;
  }
  export default Database;
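The practical effect of this hunk: normalize() now returns NormalizedModel, and NormalizedDatabase names only the flattened database entry inside model.database. A sketch of migrated consumer code (the static Parser.parse entry point is assumed, as above):

```ts
import { Parser } from '@dbml/core';
import type { NormalizedModel, NormalizedDatabase } from '@dbml/core';

const source = `Table users {
  id int
}`;

const database = Parser.parse(source, 'dbml'); // static entry point assumed
const model: NormalizedModel = database.normalize();

// model.database is an id -> NormalizedDatabase map rather than the whole model.
const dbEntry: NormalizedDatabase | undefined = Object.values(model.database)[0];
```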
package/types/model_structure/endpoint.d.ts CHANGED
@@ -2,7 +2,17 @@ import Element from './element';
  import Field from './field';
  import Ref from './ref';
  import DbState from './dbState';
- import { NormalizedDatabase } from './database';
+ import { NormalizedModel } from './database';
+ import { Token } from 'antlr4';
+
+ export interface RawEndpoint {
+ schemaName: string | null;
+ tableName: string;
+ fieldNames: string[];
+ relation: '1' | '*';
+ token: Token;
+ }
+
  declare class Endpoint extends Element {
  relation: any;
  schemaName: string;
@@ -39,17 +49,20 @@ declare class Endpoint extends Element {
  relation: any;
  };
  setFields(fieldNames: any, table: any): void;
- normalize(model: NormalizedDatabase): void;
+ normalize(model: NormalizedModel): void;
  }
  export interface NormalizedEndpoint {
- [_id: number]: {
- id: number;
- schemaName: string;
- tableName: string;
- fieldNames: string[];
- relation: any;
- refId: number;
- fieldIds: number[];
- };
+ id: number;
+ schemaName: string | null;
+ tableName: string;
+ fieldNames: string[];
+ fieldIds: number[];
+ relation: string;
+ refId: number;
+ }
+
+ export interface NormalizedEndpointIdMap {
+ [_id: number]: NormalizedEndpoint;
  }
+
  export default Endpoint;