rawsql-ts 0.1.0-beta.3 → 0.1.0-beta.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +143 -199
- package/dist/index.js +32 -0
- package/dist/index.js.map +1 -0
- package/dist/models/BinarySelectQuery.js +140 -0
- package/dist/models/BinarySelectQuery.js.map +1 -0
- package/dist/models/Clause.js +318 -0
- package/dist/models/Clause.js.map +1 -0
- package/dist/models/KeywordTrie.js +52 -0
- package/dist/models/KeywordTrie.js.map +1 -0
- package/dist/models/Lexeme.js +21 -0
- package/dist/models/Lexeme.js.map +1 -0
- package/dist/models/SelectQuery.js +10 -0
- package/dist/models/SelectQuery.js.map +1 -0
- package/dist/models/SimpleSelectQuery.js +290 -0
- package/dist/models/SimpleSelectQuery.js.map +1 -0
- package/dist/models/SqlComponent.js +27 -0
- package/dist/models/SqlComponent.js.map +1 -0
- package/dist/models/ValueComponent.js +250 -0
- package/dist/models/ValueComponent.js.map +1 -0
- package/dist/models/ValuesQuery.js +16 -0
- package/dist/models/ValuesQuery.js.map +1 -0
- package/dist/parsers/CommandExpressionParser.js +124 -0
- package/dist/parsers/CommandExpressionParser.js.map +1 -0
- package/dist/parsers/CommonTableParser.js +60 -0
- package/dist/parsers/CommonTableParser.js.map +1 -0
- package/dist/parsers/ForClauseParser.js +56 -0
- package/dist/parsers/ForClauseParser.js.map +1 -0
- package/dist/parsers/FromClauseParser.js +45 -0
- package/dist/parsers/FromClauseParser.js.map +1 -0
- package/dist/parsers/FunctionExpressionParser.js +178 -0
- package/dist/parsers/FunctionExpressionParser.js.map +1 -0
- package/dist/parsers/GroupByParser.js +56 -0
- package/dist/parsers/GroupByParser.js.map +1 -0
- package/dist/parsers/HavingParser.js +34 -0
- package/dist/parsers/HavingParser.js.map +1 -0
- package/dist/parsers/IdentifierParser.js +39 -0
- package/dist/parsers/IdentifierParser.js.map +1 -0
- package/dist/parsers/JoinClauseParser.js +105 -0
- package/dist/parsers/JoinClauseParser.js.map +1 -0
- package/dist/parsers/KeywordParser.js +91 -0
- package/dist/parsers/KeywordParser.js.map +1 -0
- package/dist/parsers/LimitClauseParser.js +48 -0
- package/dist/parsers/LimitClauseParser.js.map +1 -0
- package/dist/parsers/LiteralParser.js +38 -0
- package/dist/parsers/LiteralParser.js.map +1 -0
- package/dist/parsers/OrderByClauseParser.js +75 -0
- package/dist/parsers/OrderByClauseParser.js.map +1 -0
- package/dist/parsers/OverExpressionParser.js +44 -0
- package/dist/parsers/OverExpressionParser.js.map +1 -0
- package/dist/parsers/ParameterExpressionParser.js +15 -0
- package/dist/parsers/ParameterExpressionParser.js.map +1 -0
- package/dist/parsers/ParenExpressionParser.js +33 -0
- package/dist/parsers/ParenExpressionParser.js.map +1 -0
- package/dist/parsers/PartitionByParser.js +51 -0
- package/dist/parsers/PartitionByParser.js.map +1 -0
- package/dist/parsers/SelectClauseParser.js +82 -0
- package/dist/parsers/SelectClauseParser.js.map +1 -0
- package/dist/parsers/SelectQueryParser.js +151 -0
- package/dist/parsers/SelectQueryParser.js.map +1 -0
- package/dist/parsers/SourceAliasExpressionParser.js +48 -0
- package/dist/parsers/SourceAliasExpressionParser.js.map +1 -0
- package/dist/parsers/SourceExpressionParser.js +34 -0
- package/dist/parsers/SourceExpressionParser.js.map +1 -0
- package/dist/parsers/SourceParser.js +116 -0
- package/dist/parsers/SourceParser.js.map +1 -0
- package/dist/parsers/SqlTokenizer.js +174 -0
- package/dist/parsers/SqlTokenizer.js.map +1 -0
- package/dist/parsers/StringSpecifierExpressionParser.js +22 -0
- package/dist/parsers/StringSpecifierExpressionParser.js.map +1 -0
- package/dist/parsers/UnaryExpressionParser.js +30 -0
- package/dist/parsers/UnaryExpressionParser.js.map +1 -0
- package/dist/parsers/ValueParser.js +134 -0
- package/dist/parsers/ValueParser.js.map +1 -0
- package/dist/parsers/ValuesQueryParser.js +86 -0
- package/dist/parsers/ValuesQueryParser.js.map +1 -0
- package/dist/parsers/WhereClauseParser.js +34 -0
- package/dist/parsers/WhereClauseParser.js.map +1 -0
- package/dist/parsers/WindowClauseParser.js +43 -0
- package/dist/parsers/WindowClauseParser.js.map +1 -0
- package/dist/parsers/WindowExpressionParser.js +151 -0
- package/dist/parsers/WindowExpressionParser.js.map +1 -0
- package/dist/parsers/WithClauseParser.js +55 -0
- package/dist/parsers/WithClauseParser.js.map +1 -0
- package/dist/tokenReaders/BaseTokenReader.js +82 -0
- package/dist/tokenReaders/BaseTokenReader.js.map +1 -0
- package/dist/tokenReaders/CommandTokenReader.js +145 -0
- package/dist/tokenReaders/CommandTokenReader.js.map +1 -0
- package/dist/tokenReaders/FunctionTokenReader.js +45 -0
- package/dist/tokenReaders/FunctionTokenReader.js.map +1 -0
- package/dist/tokenReaders/IdentifierTokenReader.js +70 -0
- package/dist/tokenReaders/IdentifierTokenReader.js.map +1 -0
- package/dist/tokenReaders/LiteralTokenReader.js +189 -0
- package/dist/tokenReaders/LiteralTokenReader.js.map +1 -0
- package/dist/tokenReaders/OperatorTokenReader.js +98 -0
- package/dist/tokenReaders/OperatorTokenReader.js.map +1 -0
- package/dist/tokenReaders/ParameterTokenReader.js +44 -0
- package/dist/tokenReaders/ParameterTokenReader.js.map +1 -0
- package/dist/tokenReaders/StringSpecifierTokenReader.js +31 -0
- package/dist/tokenReaders/StringSpecifierTokenReader.js.map +1 -0
- package/dist/tokenReaders/SymbolTokenReader.js +35 -0
- package/dist/tokenReaders/SymbolTokenReader.js.map +1 -0
- package/dist/tokenReaders/TokenReaderManager.js +110 -0
- package/dist/tokenReaders/TokenReaderManager.js.map +1 -0
- package/dist/tokenReaders/TypeTokenReader.js +59 -0
- package/dist/tokenReaders/TypeTokenReader.js.map +1 -0
- package/dist/transformers/CTEBuilder.js +188 -0
- package/dist/transformers/CTEBuilder.js.map +1 -0
- package/dist/transformers/CTECollector.js +384 -0
- package/dist/transformers/CTECollector.js.map +1 -0
- package/dist/transformers/CTEDisabler.js +325 -0
- package/dist/transformers/CTEDisabler.js.map +1 -0
- package/dist/transformers/CTEInjector.js +83 -0
- package/dist/transformers/CTEInjector.js.map +1 -0
- package/dist/transformers/CTENormalizer.js +42 -0
- package/dist/transformers/CTENormalizer.js.map +1 -0
- package/dist/transformers/Formatter.js +452 -0
- package/dist/transformers/Formatter.js.map +1 -0
- package/dist/transformers/QueryNormalizer.js +114 -0
- package/dist/transformers/QueryNormalizer.js.map +1 -0
- package/dist/transformers/SelectValueCollector.js +249 -0
- package/dist/transformers/SelectValueCollector.js.map +1 -0
- package/dist/transformers/SelectableColumnCollector.js +308 -0
- package/dist/transformers/SelectableColumnCollector.js.map +1 -0
- package/dist/transformers/TableSourceCollector.js +384 -0
- package/dist/transformers/TableSourceCollector.js.map +1 -0
- package/dist/transformers/UpstreamSelectQueryFinder.js +129 -0
- package/dist/transformers/UpstreamSelectQueryFinder.js.map +1 -0
- package/dist/utils/charLookupTable.js +73 -0
- package/dist/utils/charLookupTable.js.map +1 -0
- package/dist/utils/stringUtils.js +168 -0
- package/dist/utils/stringUtils.js.map +1 -0
- package/package.json +2 -2
- package/dist/tsconfig.tsbuildinfo +0 -1
- package/dist/vitest.config.js +0 -15
- package/dist/vitest.config.js.map +0 -1
package/dist/parsers/IdentifierParser.js
@@ -0,0 +1,39 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.IdentifierParser = void 0;
const Lexeme_1 = require("../models/Lexeme");
const ValueComponent_1 = require("../models/ValueComponent");
class IdentifierParser {
    static parse(lexemes, index) {
        // Check for column reference pattern ([identifier dot] * n + identifier)
        let idx = index;
        const identifiers = [];
        // Add the first identifier
        identifiers.push(lexemes[idx].value);
        idx++;
        // Look for dot and identifier pattern
        // support wildcard '*' as identifier (e.g. select t.* from t)
        while (idx < lexemes.length &&
            idx + 1 < lexemes.length &&
            lexemes[idx].type === Lexeme_1.TokenType.Dot &&
            (lexemes[idx + 1].type === Lexeme_1.TokenType.Identifier || lexemes[idx + 1].value === "*")) {
            // Skip the dot and add the next identifier
            idx++;
            identifiers.push(lexemes[idx].value);
            idx++;
        }
        if (identifiers.length > 1) {
            // If there are multiple identifiers, treat it as a column reference
            const lastIdentifier = identifiers.pop() || '';
            const value = new ValueComponent_1.ColumnReference(identifiers, lastIdentifier);
            return { value, newIndex: idx };
        }
        else {
            // If there is a single identifier, treat it as a simple identifier
            const value = new ValueComponent_1.ColumnReference(null, identifiers[0]);
            return { value, newIndex: idx };
        }
    }
}
exports.IdentifierParser = IdentifierParser;
//# sourceMappingURL=IdentifierParser.js.map
package/dist/parsers/IdentifierParser.js.map
@@ -0,0 +1 @@
{"version":3,"file":"IdentifierParser.js","sourceRoot":"","sources":["../../src/parsers/IdentifierParser.ts"],"names":[],"mappings":";;;AAAA,6CAAqD;AACrD,6DAA2E;AAE3E,MAAa,gBAAgB;IAClB,MAAM,CAAC,KAAK,CAAC,OAAiB,EAAE,KAAa;QAChD,yEAAyE;QACzE,IAAI,GAAG,GAAG,KAAK,CAAC;QAChB,MAAM,WAAW,GAAa,EAAE,CAAC;QAEjC,2BAA2B;QAC3B,WAAW,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,CAAC;QACrC,GAAG,EAAE,CAAC;QAEN,sCAAsC;QACtC,8DAA8D;QAC9D,OACI,GAAG,GAAG,OAAO,CAAC,MAAM;YACpB,GAAG,GAAG,CAAC,GAAG,OAAO,CAAC,MAAM;YACxB,OAAO,CAAC,GAAG,CAAC,CAAC,IAAI,KAAK,kBAAS,CAAC,GAAG;YACnC,CAAC,OAAO,CAAC,GAAG,GAAG,CAAC,CAAC,CAAC,IAAI,KAAK,kBAAS,CAAC,UAAU,IAAI,OAAO,CAAC,GAAG,GAAG,CAAC,CAAC,CAAC,KAAK,KAAK,GAAG,CAAC,EACpF,CAAC;YACC,2CAA2C;YAC3C,GAAG,EAAE,CAAC;YACN,WAAW,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,CAAC;YACrC,GAAG,EAAE,CAAC;QACV,CAAC;QAED,IAAI,WAAW,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;YACzB,oEAAoE;YACpE,MAAM,cAAc,GAAG,WAAW,CAAC,GAAG,EAAE,IAAI,EAAE,CAAC;YAC/C,MAAM,KAAK,GAAG,IAAI,gCAAe,CAAC,WAAW,EAAE,cAAc,CAAC,CAAC;YAC/D,OAAO,EAAE,KAAK,EAAE,QAAQ,EAAE,GAAG,EAAE,CAAC;QACpC,CAAC;aAAM,CAAC;YACJ,mEAAmE;YACnE,MAAM,KAAK,GAAG,IAAI,gCAAe,CAAC,IAAI,EAAE,WAAW,CAAC,CAAC,CAAC,CAAC,CAAC;YACxD,OAAO,EAAE,KAAK,EAAE,QAAQ,EAAE,GAAG,EAAE,CAAC;QACpC,CAAC;IACL,CAAC;CACJ;AAnCD,4CAmCC"}
package/dist/parsers/JoinClauseParser.js
@@ -0,0 +1,105 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.JoinClauseParser = void 0;
const Clause_1 = require("../models/Clause");
const Lexeme_1 = require("../models/Lexeme");
const CommandTokenReader_1 = require("../tokenReaders/CommandTokenReader");
const SourceExpressionParser_1 = require("./SourceExpressionParser");
const ValueParser_1 = require("./ValueParser");
class JoinClauseParser {
    static tryParse(lexemes, index) {
        let idx = index;
        const joins = [];
        while (this.isJoinCommand(lexemes, idx)) {
            const joinClause = this.parseJoinClause(lexemes, idx);
            joins.push(joinClause.value);
            idx = joinClause.newIndex;
        }
        if (joins.length > 0) {
            return { value: joins, newIndex: idx };
        }
        return null;
    }
    static isJoinKeyword(value) {
        // Although performance is not ideal,
        // we use keyword token reader to centralize keyword management
        const result = CommandTokenReader_1.joinkeywordParser.parse(value, 0);
        if (result) {
            return true;
        }
        return false;
    }
    static parseLateral(lexemes, index) {
        let idx = index;
        if (idx < lexemes.length && lexemes[idx].value === 'lateral') {
            // Skip 'lateral' keyword
            idx++;
            return { value: true, newIndex: idx };
        }
        return { value: false, newIndex: idx };
    }
    static isJoinCommand(lexemes, index) {
        if (index >= lexemes.length) {
            return false;
        }
        if (lexemes[index].type === Lexeme_1.TokenType.Comma || this.isJoinKeyword(lexemes[index].value) === true) {
            return true;
        }
        return false;
    }
    static parseJoinClause(lexemes, index) {
        let idx = index;
        // Get the join type
        const joinType = lexemes[idx].value === "," ? "cross join" : lexemes[idx].value;
        idx++;
        // Check for lateral join
        const lateralResult = this.parseLateral(lexemes, idx);
        const lateral = lateralResult.value;
        idx = lateralResult.newIndex;
        // Parse the source expression to join with
        const sourceResult = SourceExpressionParser_1.SourceExpressionParser.parse(lexemes, idx);
        idx = sourceResult.newIndex;
        if (idx < lexemes.length) {
            let result = this.tryParseJoinOn(lexemes, idx, joinType, sourceResult.value, lateral);
            if (result) {
                return { value: result.value, newIndex: result.newIndex };
            }
            result = this.tryParseJoinUsing(lexemes, idx, joinType, sourceResult.value, lateral);
            if (result) {
                return { value: result.value, newIndex: result.newIndex };
            }
        }
        // If we reach the end of the input, we can treat it as a natural join
        const joinClause = new Clause_1.JoinClause(joinType, sourceResult.value, null, lateral);
        return { value: joinClause, newIndex: idx };
    }
    static tryParseJoinOn(lexemes, index, joinType, source, lateral) {
        let idx = index;
        if (idx < lexemes.length && lexemes[idx].value === 'on') {
            idx++; // Skip 'on' keyword
            // Parse the condition expression
            const condition = ValueParser_1.ValueParser.parse(lexemes, idx);
            idx = condition.newIndex;
            const joinOn = new Clause_1.JoinOnClause(condition.value);
            const joinClause = new Clause_1.JoinClause(joinType, source, joinOn, lateral);
            return { value: joinClause, newIndex: condition.newIndex };
        }
        return null;
    }
    static tryParseJoinUsing(lexemes, index, joinType, source, lateral) {
        let idx = index;
        if (idx < lexemes.length && lexemes[idx].value === 'using') {
            idx++; // Skip 'using' keyword
            // Parse the columns in parentheses
            const result = ValueParser_1.ValueParser.parseArgument(Lexeme_1.TokenType.OpenParen, Lexeme_1.TokenType.CloseParen, lexemes, idx);
            const usingColumns = result.value;
            idx = result.newIndex;
            const joinUsing = new Clause_1.JoinUsingClause(usingColumns);
            const joinClause = new Clause_1.JoinClause(joinType, source, joinUsing, lateral);
            return { value: joinClause, newIndex: result.newIndex };
        }
        return null;
    }
}
exports.JoinClauseParser = JoinClauseParser;
//# sourceMappingURL=JoinClauseParser.js.map
package/dist/parsers/JoinClauseParser.js.map
@@ -0,0 +1 @@
{"version":3,"file":"JoinClauseParser.js","sourceRoot":"","sources":["../../src/parsers/JoinClauseParser.ts"],"names":[],"mappings":";;;AAAA,6CAA+F;AAC/F,6CAAqD;AACrD,2EAAuE;AACvE,qEAAkE;AAClE,+CAA4C;AAE5C,MAAa,gBAAgB;IAClB,MAAM,CAAC,QAAQ,CAAC,OAAiB,EAAE,KAAa;QACnD,IAAI,GAAG,GAAG,KAAK,CAAC;QAChB,MAAM,KAAK,GAAiB,EAAE,CAAC;QAE/B,OAAO,IAAI,CAAC,aAAa,CAAC,OAAO,EAAE,GAAG,CAAC,EAAE,CAAC;YACtC,MAAM,UAAU,GAAG,IAAI,CAAC,eAAe,CAAC,OAAO,EAAE,GAAG,CAAC,CAAC;YACtD,KAAK,CAAC,IAAI,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC;YAC7B,GAAG,GAAG,UAAU,CAAC,QAAQ,CAAC;QAC9B,CAAC;QAED,IAAI,KAAK,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;YACnB,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,QAAQ,EAAE,GAAG,EAAE,CAAC;QAC3C,CAAC;QACD,OAAO,IAAI,CAAC;IAChB,CAAC;IAEO,MAAM,CAAC,aAAa,CAAC,KAAa;QACtC,qCAAqC;QACrC,+DAA+D;QAC/D,MAAM,MAAM,GAAG,sCAAiB,CAAC,KAAK,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC;QACjD,IAAI,MAAM,EAAE,CAAC;YACT,OAAO,IAAI,CAAC;QAChB,CAAC;QACD,OAAO,KAAK,CAAC;IACjB,CAAC;IAEO,MAAM,CAAC,YAAY,CAAC,OAAiB,EAAE,KAAa;QACxD,IAAI,GAAG,GAAG,KAAK,CAAC;QAEhB,IAAI,GAAG,GAAG,OAAO,CAAC,MAAM,IAAI,OAAO,CAAC,GAAG,CAAC,CAAC,KAAK,KAAK,SAAS,EAAE,CAAC;YAC3D,yBAAyB;YACzB,GAAG,EAAE,CAAC;YACN,OAAO,EAAE,KAAK,EAAE,IAAI,EAAE,QAAQ,EAAE,GAAG,EAAE,CAAC;QAC1C,CAAC;QAED,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,QAAQ,EAAE,GAAG,EAAE,CAAC;IAC3C,CAAC;IAEO,MAAM,CAAC,aAAa,CAAC,OAAiB,EAAE,KAAa;QACzD,IAAI,KAAK,IAAI,OAAO,CAAC,MAAM,EAAE,CAAC;YAC1B,OAAO,KAAK,CAAC;QACjB,CAAC;QAED,IAAI,OAAO,CAAC,KAAK,CAAC,CAAC,IAAI,KAAK,kBAAS,CAAC,KAAK,IAAI,IAAI,CAAC,aAAa,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC,KAAK,CAAC,KAAK,IAAI,EAAE,CAAC;YAC/F,OAAO,IAAI,CAAC;QAChB,CAAC;QACD,OAAO,KAAK,CAAC;IACjB,CAAC;IAEO,MAAM,CAAC,eAAe,CAAC,OAAiB,EAAE,KAAa;QAC3D,IAAI,GAAG,GAAG,KAAK,CAAC;QAEhB,oBAAoB;QACpB,MAAM,QAAQ,GAAG,OAAO,CAAC,GAAG,CAAC,CAAC,KAAK,KAAK,GAAG,CAAC,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC;QAChF,GAAG,EAAE,CAAC;QAEN,yBAAyB;QACzB,MAAM,aAAa,GAAG,IAAI,CAAC,YAAY,CAAC,OAAO,EAAE,GAAG,CAAC,CAAC;QACtD,MAAM,OAAO,GAAG,aAAa,CAAC,KAAK,CAAC;QACpC,GAAG,GAAG,aAAa,CAAC,QAAQ,CAAC;QAE7B,2CAA2C;QAC3C,MAAM,YAAY,GAAG,+CAAsB,CAAC,KAAK,CAAC,OAAO,EAAE,GAAG,CAAC,CAAC;QAChE,GAAG,GAAG,YAAY,CAAC,QAAQ,CAAC;QAE5B,IAAI,GAAG,GAAG,OAAO,CAAC,MAAM,EAAE,CAAC;YACvB,IAAI,MAAM,GAAG,IAAI,CAAC,cAAc,CAAC,OAAO,EAAE,GAAG,EAAE,QAAQ,EAAE,YAAY,CAAC,KAAK,EAAE,OAAO,CAAC,CAAC;YACtF,IAAI,MAAM,EAAE,CAAC;gBACT,OAAO,EAAE,KAAK,EAAE,MAAM,CAAC,KAAK,EAAE,QAAQ,EAAE,MAAM,CAAC,QAAQ,EAAE,CAAC;YAC9D,CAAC;YACD,MAAM,GAAG,IAAI,CAAC,iBAAiB,CAAC,OAAO,EAAE,GAAG,EAAE,QAAQ,EAAE,YAAY,CAAC,KAAK,EAAE,OAAO,CAAC,CAAC;YACrF,IAAI,MAAM,EAAE,CAAC;gBACT,OAAO,EAAE,KAAK,EAAE,MAAM,CAAC,KAAK,EAAE,QAAQ,EAAE,MAAM,CAAC,QAAQ,EAAE,CAAC;YAC9D,CAAC;QACL,CAAC;QAED,sEAAsE;QACtE,MAAM,UAAU,GAAG,IAAI,mBAAU,CAAC,QAAQ,EAAE,YAAY,CAAC,KAAK,EAAE,IAAI,EAAE,OAAO,CAAC,CAAC;QAC/E,OAAO,EAAE,KAAK,EAAE,UAAU,EAAE,QAAQ,EAAE,GAAG,EAAE,CAAC;IAChD,CAAC;IAEO,MAAM,CAAC,cAAc,CAAC,OAAiB,EAAE,KAAa,EAAE,QAAgB,EAAE,MAAwB,EAAE,OAAgB;QACxH,IAAI,GAAG,GAAG,KAAK,CAAC;QAChB,IAAI,GAAG,GAAG,OAAO,CAAC,MAAM,IAAI,OAAO,CAAC,GAAG,CAAC,CAAC,KAAK,KAAK,IAAI,EAAE,CAAC;YACtD,GAAG,EAAE,CAAC,CAAC,oBAAoB;YAE3B,iCAAiC;YACjC,MAAM,SAAS,GAAG,yBAAW,CAAC,KAAK,CAAC,OAAO,EAAE,GAAG,CAAC,CAAC;YAClD,GAAG,GAAG,SAAS,CAAC,QAAQ,CAAC;YACzB,MAAM,MAAM,GAAG,IAAI,qBAAY,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC;YACjD,MAAM,UAAU,GAAG,IAAI,mBAAU,CAAC,QAAQ,EAAE,MAAM,EAAE,MAAM,EAAE,OAAO,CAAC,CAAC;YACrE,OAAO,EAAE,KAAK,EAAE,UAAU,EAAE,QAAQ,EAAE,SAAS,CAAC,QAAQ,EAAE,CAAC;QAC/D,CAAC;QACD,OAAO,IAAI,CAAC;IAChB,CAAC;IAEO,MAAM,CAAC,iBAAiB,CAAC,OAAiB,EAAE,KAAa,EAAE,QAAgB,EAAE,MAAwB,EAAE,OAAgB;QAC3H,IAAI,GAAG,GAAG,KAAK,CAAC;QAChB,IAAI,GAAG,GAAG,OAAO,CAAC,MAAM,IAAI,OAAO,CAAC,GAAG,CAAC,CAAC,KAAK,KAAK,OAAO,EAAE,CAAC;
YACzD,GAAG,EAAE,CAAC,CAAC,uBAAuB;YAE9B,mCAAmC;YACnC,MAAM,MAAM,GAAG,yBAAW,CAAC,aAAa,CAAC,kBAAS,CAAC,SAAS,EAAE,kBAAS,CAAC,UAAU,EAAE,OAAO,EAAE,GAAG,CAAC,CAAC;YAClG,MAAM,YAAY,GAAG,MAAM,CAAC,KAAK,CAAC;YAClC,GAAG,GAAG,MAAM,CAAC,QAAQ,CAAC;YACtB,MAAM,SAAS,GAAG,IAAI,wBAAe,CAAC,YAAY,CAAC,CAAC;YACpD,MAAM,UAAU,GAAG,IAAI,mBAAU,CAAC,QAAQ,EAAE,MAAM,EAAE,SAAS,EAAE,OAAO,CAAC,CAAC;YACxE,OAAO,EAAE,KAAK,EAAE,UAAU,EAAE,QAAQ,EAAE,MAAM,CAAC,QAAQ,EAAE,CAAC;QAC5D,CAAC;QACD,OAAO,IAAI,CAAC;IAChB,CAAC;CACJ;AAhHD,4CAgHC"}
package/dist/parsers/KeywordParser.js
@@ -0,0 +1,91 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.KeywordParser = exports.KeywordMatchResult = void 0;
const stringUtils_1 = require("../utils/stringUtils");
var KeywordMatchResult;
(function (KeywordMatchResult) {
    KeywordMatchResult[KeywordMatchResult["NotAKeyword"] = 0] = "NotAKeyword";
    KeywordMatchResult[KeywordMatchResult["PartialOnly"] = 1] = "PartialOnly";
    KeywordMatchResult[KeywordMatchResult["PartialOrFinal"] = 2] = "PartialOrFinal";
    KeywordMatchResult[KeywordMatchResult["Final"] = 3] = "Final"; // "Complete match (no longer keywords after this)"
})(KeywordMatchResult || (exports.KeywordMatchResult = KeywordMatchResult = {}));
class KeywordParser {
    constructor(trie) {
        this.trie = trie;
    }
    isEndOfInput(input, position, shift = 0) {
        return position + shift >= input.length;
    }
    canParse(input, position, shift = 0) {
        return !this.isEndOfInput(input, position, shift);
    }
    parse(input, position) {
        if (this.isEndOfInput(input, position)) {
            return null;
        }
        // reset trie node
        this.trie.reset();
        const result = stringUtils_1.StringUtils.tryReadRegularIdentifier(input, position);
        if (result === null) {
            return null;
        }
        let matchResult = this.trie.pushLexeme(result.identifier.toLowerCase());
        if (matchResult === KeywordMatchResult.NotAKeyword) {
            return null;
        }
        if (matchResult === KeywordMatchResult.Final) {
            return {
                keyword: result.identifier,
                newPosition: result.newPosition
            };
        }
        // multi-word keyword
        let lexeme = result.identifier;
        position = stringUtils_1.StringUtils.readWhiteSpaceAndComment(input, result.newPosition).position;
        // end of input
        if (this.isEndOfInput(input, position)) {
            if (matchResult === KeywordMatchResult.PartialOrFinal) {
                // if the last match was partial or final, it means that the keyword is finished
                return {
                    keyword: lexeme,
                    newPosition: position
                };
            }
            else {
                return null;
            }
        }
        while (this.canParse(input, position)) {
            const previousMatchResult = matchResult;
            const result = stringUtils_1.StringUtils.tryReadRegularIdentifier(input, position);
            if (result !== null) {
                matchResult = this.trie.pushLexeme(result.identifier.toLowerCase());
                if (matchResult === KeywordMatchResult.NotAKeyword) {
                    if (previousMatchResult === KeywordMatchResult.PartialOrFinal) {
                        break;
                    }
                    else {
                        return null;
                    }
                }
                lexeme += ' ' + result.identifier;
                position = stringUtils_1.StringUtils.readWhiteSpaceAndComment(input, result.newPosition).position;
                if (matchResult === KeywordMatchResult.Final) {
                    break;
                }
            }
            else if (previousMatchResult === KeywordMatchResult.PartialOrFinal) {
                break;
            }
            else {
                return null;
            }
        }
        return {
            keyword: lexeme,
            newPosition: position
        };
    }
}
exports.KeywordParser = KeywordParser;
//# sourceMappingURL=KeywordParser.js.map
package/dist/parsers/KeywordParser.js.map
@@ -0,0 +1 @@
{"version":3,"file":"KeywordParser.js","sourceRoot":"","sources":["../../src/parsers/KeywordParser.ts"],"names":[],"mappings":";;;AACA,sDAAmD;AAEnD,IAAY,kBAKX;AALD,WAAY,kBAAkB;IAC1B,yEAAW,CAAA;IACX,yEAAW,CAAA;IACX,+EAAc,CAAA;IACd,6DAAK,CAAA,CAAY,mDAAmD;AACxE,CAAC,EALW,kBAAkB,kCAAlB,kBAAkB,QAK7B;AAED,MAAa,aAAa;IAGtB,YAAY,IAAiB;QACzB,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC;IACrB,CAAC;IAEO,YAAY,CAAC,KAAa,EAAE,QAAgB,EAAE,QAAgB,CAAC;QACnE,OAAO,QAAQ,GAAG,KAAK,IAAI,KAAK,CAAC,MAAM,CAAC;IAC5C,CAAC;IAEO,QAAQ,CAAC,KAAa,EAAE,QAAgB,EAAE,QAAgB,CAAC;QAC/D,OAAO,CAAC,IAAI,CAAC,YAAY,CAAC,KAAK,EAAE,QAAQ,EAAE,KAAK,CAAC,CAAC;IACtD,CAAC;IAEM,KAAK,CAAC,KAAa,EAAE,QAAgB;QACxC,IAAI,IAAI,CAAC,YAAY,CAAC,KAAK,EAAE,QAAQ,CAAC,EAAE,CAAC;YACrC,OAAO,IAAI,CAAC;QAChB,CAAC;QAED,kBAAkB;QAClB,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE,CAAC;QAClB,MAAM,MAAM,GAAG,yBAAW,CAAC,wBAAwB,CAAC,KAAK,EAAE,QAAQ,CAAC,CAAC;QAErE,IAAI,MAAM,KAAK,IAAI,EAAE,CAAC;YAClB,OAAO,IAAI,CAAC;QAChB,CAAC;QAED,IAAI,WAAW,GAAG,IAAI,CAAC,IAAI,CAAC,UAAU,CAAC,MAAM,CAAC,UAAU,CAAC,WAAW,EAAE,CAAC,CAAC;QAExE,IAAI,WAAW,KAAK,kBAAkB,CAAC,WAAW,EAAE,CAAC;YACjD,OAAO,IAAI,CAAC;QAChB,CAAC;QAED,IAAI,WAAW,KAAK,kBAAkB,CAAC,KAAK,EAAE,CAAC;YAC3C,OAAO;gBACH,OAAO,EAAE,MAAM,CAAC,UAAU;gBAC1B,WAAW,EAAE,MAAM,CAAC,WAAW;aAClC,CAAC;QACN,CAAC;QAED,qBAAqB;QACrB,IAAI,MAAM,GAAG,MAAM,CAAC,UAAU,CAAC;QAC/B,QAAQ,GAAG,yBAAW,CAAC,wBAAwB,CAAC,KAAK,EAAE,MAAM,CAAC,WAAW,CAAC,CAAC,QAAQ,CAAC;QAEpF,eAAe;QACf,IAAI,IAAI,CAAC,YAAY,CAAC,KAAK,EAAE,QAAQ,CAAC,EAAE,CAAC;YACrC,IAAI,WAAW,KAAK,kBAAkB,CAAC,cAAc,EAAE,CAAC;gBACpD,gFAAgF;gBAChF,OAAO;oBACH,OAAO,EAAE,MAAM;oBACf,WAAW,EAAE,QAAQ;iBACxB,CAAC;YACN,CAAC;iBAAM,CAAC;gBAEJ,OAAO,IAAI,CAAC;YAChB,CAAC;QACL,CAAC;QAED,OAAO,IAAI,CAAC,QAAQ,CAAC,KAAK,EAAE,QAAQ,CAAC,EAAE,CAAC;YACpC,MAAM,mBAAmB,GAAG,WAAW,CAAC;YAExC,MAAM,MAAM,GAAG,yBAAW,CAAC,wBAAwB,CAAC,KAAK,EAAE,QAAQ,CAAC,CAAC;YAErE,IAAI,MAAM,KAAK,IAAI,EAAE,CAAC;gBAClB,WAAW,GAAG,IAAI,CAAC,IAAI,CAAC,UAAU,CAAC,MAAM,CAAC,UAAU,CAAC,WAAW,EAAE,CAAC,CAAC;gBAEpE,IAAI,WAAW,KAAK,kBAAkB,CAAC,WAAW,EAAE,CAAC;oBACjD,IAAI,mBAAmB,KAAK,kBAAkB,CAAC,cAAc,EAAE,CAAC;wBAC5D,MAAM;oBACV,CAAC;yBAAM,CAAC;wBACJ,OAAO,IAAI,CAAC;oBAChB,CAAC;gBACL,CAAC;gBAED,MAAM,IAAI,GAAG,GAAG,MAAM,CAAC,UAAU,CAAC;gBAClC,QAAQ,GAAG,yBAAW,CAAC,wBAAwB,CAAC,KAAK,EAAE,MAAM,CAAC,WAAW,CAAC,CAAC,QAAQ,CAAC;gBAEpF,IAAI,WAAW,KAAK,kBAAkB,CAAC,KAAK,EAAE,CAAC;oBAC3C,MAAM;gBACV,CAAC;YACL,CAAC;iBAAM,IAAI,mBAAmB,KAAK,kBAAkB,CAAC,cAAc,EAAE,CAAC;gBACnE,MAAM;YACV,CAAC;iBAAM,CAAC;gBACJ,OAAO,IAAI,CAAC;YAChB,CAAC;QACL,CAAC;QAED,OAAO;YACH,OAAO,EAAE,MAAM;YACf,WAAW,EAAE,QAAQ;SACxB,CAAC;IACN,CAAC;CACJ;AA7FD,sCA6FC"}
package/dist/parsers/LimitClauseParser.js
@@ -0,0 +1,48 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.LimitClauseParser = void 0;
const Clause_1 = require("../models/Clause");
const SqlTokenizer_1 = require("./SqlTokenizer");
const ValueParser_1 = require("./ValueParser");
class LimitClauseParser {
    static parseFromText(query) {
        const tokenizer = new SqlTokenizer_1.SqlTokenizer(query); // Initialize tokenizer
        const lexemes = tokenizer.readLexmes(); // Get tokens
        // Parse
        const result = this.parse(lexemes, 0);
        // Error if there are remaining tokens
        if (result.newIndex < lexemes.length) {
            throw new Error(`Syntax error: Unexpected token "${lexemes[result.newIndex].value}" at position ${result.newIndex}. The LIMIT clause is complete but there are additional tokens.`);
        }
        return result.value;
    }
    static parse(lexemes, index) {
        let idx = index;
        if (lexemes[idx].value !== 'limit') {
            throw new Error(`Syntax error at position ${idx}: Expected 'LIMIT' keyword but found "${lexemes[idx].value}". LIMIT clauses must start with the LIMIT keyword.`);
        }
        idx++;
        if (idx >= lexemes.length) {
            throw new Error(`Syntax error: Unexpected end of input after 'LIMIT' keyword. The LIMIT clause requires a numeric expression.`);
        }
        // Parse LIMIT value
        const limitItem = ValueParser_1.ValueParser.parse(lexemes, idx);
        idx = limitItem.newIndex;
        let offsetItem = null;
        // Check if there is an OFFSET clause
        if (idx < lexemes.length && lexemes[idx].value === 'offset') {
            idx++;
            if (idx >= lexemes.length) {
                throw new Error(`Syntax error: Unexpected end of input after 'OFFSET' keyword. The OFFSET clause requires a numeric expression.`);
            }
            // Parse OFFSET value
            const offsetValueItem = ValueParser_1.ValueParser.parse(lexemes, idx);
            offsetItem = offsetValueItem.value;
            idx = offsetValueItem.newIndex;
        }
        const clause = new Clause_1.LimitClause(limitItem.value, offsetItem);
        return { value: clause, newIndex: idx };
    }
}
exports.LimitClauseParser = LimitClauseParser;
//# sourceMappingURL=LimitClauseParser.js.map
package/dist/parsers/LimitClauseParser.js.map
@@ -0,0 +1 @@
{"version":3,"file":"LimitClauseParser.js","sourceRoot":"","sources":["../../src/parsers/LimitClauseParser.ts"],"names":[],"mappings":";;;AAAA,6CAA8D;AAE9D,iDAA8C;AAC9C,+CAA4C;AAE5C,MAAa,iBAAiB;IACnB,MAAM,CAAC,aAAa,CAAC,KAAa;QACrC,MAAM,SAAS,GAAG,IAAI,2BAAY,CAAC,KAAK,CAAC,CAAC,CAAC,uBAAuB;QAClE,MAAM,OAAO,GAAG,SAAS,CAAC,UAAU,EAAE,CAAC,CAAC,aAAa;QAErD,QAAQ;QACR,MAAM,MAAM,GAAG,IAAI,CAAC,KAAK,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC;QAEtC,sCAAsC;QACtC,IAAI,MAAM,CAAC,QAAQ,GAAG,OAAO,CAAC,MAAM,EAAE,CAAC;YACnC,MAAM,IAAI,KAAK,CAAC,mCAAmC,OAAO,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,KAAK,iBAAiB,MAAM,CAAC,QAAQ,iEAAiE,CAAC,CAAC;QACxL,CAAC;QAED,OAAO,MAAM,CAAC,KAAK,CAAC;IACxB,CAAC;IAEM,MAAM,CAAC,KAAK,CAAC,OAAiB,EAAE,KAAa;QAChD,IAAI,GAAG,GAAG,KAAK,CAAC;QAEhB,IAAI,OAAO,CAAC,GAAG,CAAC,CAAC,KAAK,KAAK,OAAO,EAAE,CAAC;YACjC,MAAM,IAAI,KAAK,CAAC,4BAA4B,GAAG,yCAAyC,OAAO,CAAC,GAAG,CAAC,CAAC,KAAK,qDAAqD,CAAC,CAAC;QACrK,CAAC;QACD,GAAG,EAAE,CAAC;QAEN,IAAI,GAAG,IAAI,OAAO,CAAC,MAAM,EAAE,CAAC;YACxB,MAAM,IAAI,KAAK,CAAC,8GAA8G,CAAC,CAAC;QACpI,CAAC;QAED,oBAAoB;QACpB,MAAM,SAAS,GAAG,yBAAW,CAAC,KAAK,CAAC,OAAO,EAAE,GAAG,CAAC,CAAC;QAClD,GAAG,GAAG,SAAS,CAAC,QAAQ,CAAC;QAEzB,IAAI,UAAU,GAAG,IAAI,CAAC;QAEtB,qCAAqC;QACrC,IAAI,GAAG,GAAG,OAAO,CAAC,MAAM,IAAI,OAAO,CAAC,GAAG,CAAC,CAAC,KAAK,KAAK,QAAQ,EAAE,CAAC;YAC1D,GAAG,EAAE,CAAC;YAEN,IAAI,GAAG,IAAI,OAAO,CAAC,MAAM,EAAE,CAAC;gBACxB,MAAM,IAAI,KAAK,CAAC,gHAAgH,CAAC,CAAC;YACtI,CAAC;YAED,qBAAqB;YACrB,MAAM,eAAe,GAAG,yBAAW,CAAC,KAAK,CAAC,OAAO,EAAE,GAAG,CAAC,CAAC;YACxD,UAAU,GAAG,eAAe,CAAC,KAAK,CAAC;YACnC,GAAG,GAAG,eAAe,CAAC,QAAQ,CAAC;QACnC,CAAC;QAED,MAAM,MAAM,GAAG,IAAI,oBAAW,CAAC,SAAS,CAAC,KAAK,EAAE,UAAU,CAAC,CAAC;QAE5D,OAAO,EAAE,KAAK,EAAE,MAAM,EAAE,QAAQ,EAAE,GAAG,EAAE,CAAC;IAC5C,CAAC;CACJ;AApDD,8CAoDC"}
package/dist/parsers/LiteralParser.js
@@ -0,0 +1,38 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.LiteralParser = void 0;
const ValueComponent_1 = require("../models/ValueComponent");
const LiteralTokenReader_1 = require("../tokenReaders/LiteralTokenReader");
class LiteralParser {
    static parse(lexemes, index) {
        // Process literal value
        let idx = index;
        const valueText = lexemes[idx].value;
        let parsedValue;
        const lex = LiteralTokenReader_1.literalKeywordParser.parse(valueText.toLowerCase(), 0);
        if (lex) {
            const value = new ValueComponent_1.RawString(lex.keyword);
            idx++;
            return { value, newIndex: idx };
        }
        // Check if it is a number
        if (/^[+-]?\d+(\.\d+)?([eE][+-]?\d+)?$/.test(valueText)) {
            parsedValue = Number(valueText);
        }
        // Otherwise, treat it as a string
        else {
            // Remove single quotes if enclosed
            if (valueText.startsWith("'") && valueText.endsWith("'")) {
                parsedValue = valueText.slice(1, -1);
            }
            else {
                parsedValue = valueText;
            }
        }
        idx++;
        const value = new ValueComponent_1.LiteralValue(parsedValue);
        return { value, newIndex: idx };
    }
}
exports.LiteralParser = LiteralParser;
//# sourceMappingURL=LiteralParser.js.map
package/dist/parsers/LiteralParser.js.map
@@ -0,0 +1 @@
{"version":3,"file":"LiteralParser.js","sourceRoot":"","sources":["../../src/parsers/LiteralParser.ts"],"names":[],"mappings":";;;AACA,6DAAmF;AACnF,2EAA0E;AAE1E,MAAa,aAAa;IACf,MAAM,CAAC,KAAK,CAAC,OAAiB,EAAE,KAAa;QAChD,wBAAwB;QACxB,IAAI,GAAG,GAAG,KAAK,CAAC;QAChB,MAAM,SAAS,GAAG,OAAO,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC;QACrC,IAAI,WAA6C,CAAC;QAElD,MAAM,GAAG,GAAG,yCAAoB,CAAC,KAAK,CAAC,SAAS,CAAC,WAAW,EAAE,EAAE,CAAC,CAAC,CAAC;QACnE,IAAI,GAAG,EAAE,CAAC;YACN,MAAM,KAAK,GAAG,IAAI,0BAAS,CAAC,GAAG,CAAC,OAAO,CAAC,CAAC;YACzC,GAAG,EAAE,CAAA;YACL,OAAO,EAAE,KAAK,EAAE,QAAQ,EAAE,GAAG,EAAE,CAAC;QACpC,CAAC;QAED,0BAA0B;QAC1B,IAAI,mCAAmC,CAAC,IAAI,CAAC,SAAS,CAAC,EAAE,CAAC;YACtD,WAAW,GAAG,MAAM,CAAC,SAAS,CAAC,CAAC;QACpC,CAAC;QACD,kCAAkC;aAC7B,CAAC;YACF,mCAAmC;YACnC,IAAI,SAAS,CAAC,UAAU,CAAC,GAAG,CAAC,IAAI,SAAS,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE,CAAC;gBACvD,WAAW,GAAG,SAAS,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;YACzC,CAAC;iBAAM,CAAC;gBACJ,WAAW,GAAG,SAAS,CAAC;YAC5B,CAAC;QACL,CAAC;QACD,GAAG,EAAE,CAAA;QACL,MAAM,KAAK,GAAG,IAAI,6BAAY,CAAC,WAAW,CAAC,CAAC;QAC5C,OAAO,EAAE,KAAK,EAAE,QAAQ,EAAE,GAAG,EAAE,CAAC;IACpC,CAAC;CACJ;AA/BD,sCA+BC"}
package/dist/parsers/OrderByClauseParser.js
@@ -0,0 +1,75 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.OrderByClauseParser = void 0;
const Clause_1 = require("../models/Clause");
const Lexeme_1 = require("../models/Lexeme");
const SqlTokenizer_1 = require("./SqlTokenizer");
const ValueParser_1 = require("./ValueParser");
class OrderByClauseParser {
    static parseFromText(query) {
        const tokenizer = new SqlTokenizer_1.SqlTokenizer(query); // Initialize tokenizer
        const lexemes = tokenizer.readLexmes(); // Get tokens
        // Parse
        const result = this.parse(lexemes, 0);
        // Error if there are remaining tokens
        if (result.newIndex < lexemes.length) {
            throw new Error(`Syntax error: Unexpected token "${lexemes[result.newIndex].value}" at position ${result.newIndex}. The ORDER BY clause is complete but there are additional tokens.`);
        }
        return result.value;
    }
    static parse(lexemes, index) {
        let idx = index;
        if (lexemes[idx].value !== 'order by') {
            throw new Error(`Syntax error at position ${idx}: Expected 'ORDER BY' keyword but found "${lexemes[idx].value}". ORDER BY clauses must start with the ORDER BY keywords.`);
        }
        idx++;
        const items = [];
        const item = this.parseItem(lexemes, idx);
        items.push(item.value);
        idx = item.newIndex;
        while (idx < lexemes.length && lexemes[idx].type === Lexeme_1.TokenType.Comma) {
            idx++;
            const item = this.parseItem(lexemes, idx);
            items.push(item.value);
            idx = item.newIndex;
        }
        if (items.length === 0) {
            throw new Error(`Syntax error at position ${index}: No ordering expressions found. The ORDER BY clause requires at least one expression to order by.`);
        }
        else {
            const clause = new Clause_1.OrderByClause(items);
            return { value: clause, newIndex: idx };
        }
    }
    static parseItem(lexemes, index) {
        let idx = index;
        const parsedValue = ValueParser_1.ValueParser.parse(lexemes, idx);
        const value = parsedValue.value;
        idx = parsedValue.newIndex;
        if (idx >= lexemes.length) {
            return { value: value, newIndex: idx };
        }
        // asc, desc
        const sortDirection = idx >= lexemes.length
            ? null
            : lexemes[idx].value === 'asc'
                ? (idx++, Clause_1.SortDirection.Ascending)
                : lexemes[idx].value === 'desc'
                    ? (idx++, Clause_1.SortDirection.Descending)
                    : null;
        // nulls first, nulls last
        const nullsSortDirection = idx >= lexemes.length
            ? null
            : lexemes[idx].value === 'nulls first'
                ? (idx++, Clause_1.NullsSortDirection.First)
                : lexemes[idx].value === 'nulls last'
                    ? (idx++, Clause_1.NullsSortDirection.Last)
                    : null;
        if (sortDirection === null && nullsSortDirection === null) {
            return { value: value, newIndex: idx };
        }
        return { value: new Clause_1.OrderByItem(value, sortDirection, nullsSortDirection), newIndex: idx };
    }
}
exports.OrderByClauseParser = OrderByClauseParser;
//# sourceMappingURL=OrderByClauseParser.js.map
package/dist/parsers/OrderByClauseParser.js.map
@@ -0,0 +1 @@
{"version":3,"file":"OrderByClauseParser.js","sourceRoot":"","sources":["../../src/parsers/OrderByClauseParser.ts"],"names":[],"mappings":";;;AAAA,6CAAmH;AACnH,6CAAqD;AACrD,iDAA8C;AAC9C,+CAA4C;AAE5C,MAAa,mBAAmB;IACrB,MAAM,CAAC,aAAa,CAAC,KAAa;QACrC,MAAM,SAAS,GAAG,IAAI,2BAAY,CAAC,KAAK,CAAC,CAAC,CAAC,uBAAuB;QAClE,MAAM,OAAO,GAAG,SAAS,CAAC,UAAU,EAAE,CAAC,CAAC,aAAa;QAErD,QAAQ;QACR,MAAM,MAAM,GAAG,IAAI,CAAC,KAAK,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC;QAEtC,sCAAsC;QACtC,IAAI,MAAM,CAAC,QAAQ,GAAG,OAAO,CAAC,MAAM,EAAE,CAAC;YACnC,MAAM,IAAI,KAAK,CAAC,mCAAmC,OAAO,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,KAAK,iBAAiB,MAAM,CAAC,QAAQ,oEAAoE,CAAC,CAAC;QAC3L,CAAC;QAED,OAAO,MAAM,CAAC,KAAK,CAAC;IACxB,CAAC;IAEM,MAAM,CAAC,KAAK,CAAC,OAAiB,EAAE,KAAa;QAChD,IAAI,GAAG,GAAG,KAAK,CAAC;QAEhB,IAAI,OAAO,CAAC,GAAG,CAAC,CAAC,KAAK,KAAK,UAAU,EAAE,CAAC;YACpC,MAAM,IAAI,KAAK,CAAC,4BAA4B,GAAG,4CAA4C,OAAO,CAAC,GAAG,CAAC,CAAC,KAAK,4DAA4D,CAAC,CAAC;QAC/K,CAAC;QACD,GAAG,EAAE,CAAC;QAEN,MAAM,KAAK,GAAuB,EAAE,CAAC;QACrC,MAAM,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,OAAO,EAAE,GAAG,CAAC,CAAC;QAC1C,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;QACvB,GAAG,GAAG,IAAI,CAAC,QAAQ,CAAC;QAEpB,OAAO,GAAG,GAAG,OAAO,CAAC,MAAM,IAAI,OAAO,CAAC,GAAG,CAAC,CAAC,IAAI,KAAK,kBAAS,CAAC,KAAK,EAAE,CAAC;YACnE,GAAG,EAAE,CAAC;YACN,MAAM,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,OAAO,EAAE,GAAG,CAAC,CAAC;YAC1C,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;YACvB,GAAG,GAAG,IAAI,CAAC,QAAQ,CAAC;QACxB,CAAC;QAED,IAAI,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;YACrB,MAAM,IAAI,KAAK,CAAC,4BAA4B,KAAK,oGAAoG,CAAC,CAAC;QAC3J,CAAC;aAAM,CAAC;YACJ,MAAM,MAAM,GAAG,IAAI,sBAAa,CAAC,KAAK,CAAC,CAAC;YACxC,OAAO,EAAE,KAAK,EAAE,MAAM,EAAE,QAAQ,EAAE,GAAG,EAAE,CAAC;QAC5C,CAAC;IACL,CAAC;IAEO,MAAM,CAAC,SAAS,CAAC,OAAiB,EAAE,KAAa;QACrD,IAAI,GAAG,GAAG,KAAK,CAAC;QAEhB,MAAM,WAAW,GAAG,yBAAW,CAAC,KAAK,CAAC,OAAO,EAAE,GAAG,CAAC,CAAC;QACpD,MAAM,KAAK,GAAG,WAAW,CAAC,KAAK,CAAC;QAChC,GAAG,GAAG,WAAW,CAAC,QAAQ,CAAC;QAE3B,IAAI,GAAG,IAAI,OAAO,CAAC,MAAM,EAAE,CAAC;YACxB,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,QAAQ,EAAE,GAAG,EAAE,CAAC;QAC3C,CAAC;QAED,YAAY;QACZ,MAAM,aAAa,GAAG,GAAG,IAAI,OAAO,CAAC,MAAM;YACvC,CAAC,CAAC,IAAI;YACN,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,KAAK,KAAK,KAAK;gBAC1B,CAAC,CAAC,CAAC,GAAG,EAAE,EAAE,sBAAa,CAAC,SAAS,CAAC;gBAClC,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,KAAK,KAAK,MAAM;oBAC3B,CAAC,CAAC,CAAC,GAAG,EAAE,EAAE,sBAAa,CAAC,UAAU,CAAC;oBACnC,CAAC,CAAC,IAAI,CAAC;QAEnB,0BAA0B;QAC1B,MAAM,kBAAkB,GAAG,GAAG,IAAI,OAAO,CAAC,MAAM;YAC5C,CAAC,CAAC,IAAI;YACN,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,KAAK,KAAK,aAAa;gBAClC,CAAC,CAAC,CAAC,GAAG,EAAE,EAAE,2BAAkB,CAAC,KAAK,CAAC;gBACnC,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,KAAK,KAAK,YAAY;oBACjC,CAAC,CAAC,CAAC,GAAG,EAAE,EAAE,2BAAkB,CAAC,IAAI,CAAC;oBAClC,CAAC,CAAC,IAAI,CAAC;QAEnB,IAAI,aAAa,KAAK,IAAI,IAAI,kBAAkB,KAAK,IAAI,EAAE,CAAC;YACxD,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,QAAQ,EAAE,GAAG,EAAE,CAAC;QAC3C,CAAC;QAED,OAAO,EAAE,KAAK,EAAE,IAAI,oBAAW,CAAC,KAAK,EAAE,aAAa,EAAE,kBAAkB,CAAC,EAAE,QAAQ,EAAE,GAAG,EAAE,CAAC;IAC/F,CAAC;CACJ;AA/ED,kDA+EC"}
package/dist/parsers/OverExpressionParser.js
@@ -0,0 +1,44 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.OverExpressionParser = void 0;
const Lexeme_1 = require("../models/Lexeme");
const ValueComponent_1 = require("../models/ValueComponent");
const SqlTokenizer_1 = require("./SqlTokenizer");
const WindowExpressionParser_1 = require("./WindowExpressionParser");
class OverExpressionParser {
    static parseFromText(query) {
        const tokenizer = new SqlTokenizer_1.SqlTokenizer(query); // Initialize tokenizer
        const lexemes = tokenizer.readLexmes(); // Get tokens
        // Parse
        const result = this.parse(lexemes, 0);
        // Error if there are remaining tokens
        if (result.newIndex < lexemes.length) {
            throw new Error(`Syntax error: Unexpected token "${lexemes[result.newIndex].value}" at position ${result.newIndex}. The OVER expression is complete but there are additional tokens.`);
        }
        return result.value;
    }
    static parse(lexemes, index) {
        let idx = index;
        if (lexemes[idx].value !== 'over') {
            throw new Error(`Syntax error at position ${idx}: Expected 'OVER' keyword but found "${lexemes[idx].value}". OVER expressions must start with the OVER keyword.`);
        }
        idx++;
        if (idx >= lexemes.length) {
            throw new Error(`Syntax error: Unexpected end of input after 'OVER' keyword. Expected either a window name or an opening parenthesis '('.`);
        }
        if (lexemes[idx].type === Lexeme_1.TokenType.Identifier) {
            // named window frame
            const name = lexemes[idx].value;
            idx++;
            return { value: new ValueComponent_1.IdentifierString(name), newIndex: idx };
        }
        if (lexemes[idx].type === Lexeme_1.TokenType.OpenParen) {
            // Delegate processing to WindowFrameExpressionParser
            const result = WindowExpressionParser_1.WindowExpressionParser.parse(lexemes, idx);
            return result;
        }
        throw new Error(`Syntax error at position ${idx}: Expected a window name or opening parenthesis '(' after OVER keyword, but found "${lexemes[idx].value}".`);
    }
}
exports.OverExpressionParser = OverExpressionParser;
//# sourceMappingURL=OverExpressionParser.js.map
package/dist/parsers/OverExpressionParser.js.map
@@ -0,0 +1 @@
{"version":3,"file":"OverExpressionParser.js","sourceRoot":"","sources":["../../src/parsers/OverExpressionParser.ts"],"names":[],"mappings":";;;AAAA,6CAAqD;AACrD,6DAA4E;AAC5E,iDAA8C;AAC9C,qEAAkE;AAElE,MAAa,oBAAoB;IACtB,MAAM,CAAC,aAAa,CAAC,KAAa;QACrC,MAAM,SAAS,GAAG,IAAI,2BAAY,CAAC,KAAK,CAAC,CAAC,CAAC,uBAAuB;QAClE,MAAM,OAAO,GAAG,SAAS,CAAC,UAAU,EAAE,CAAC,CAAC,aAAa;QAErD,QAAQ;QACR,MAAM,MAAM,GAAG,IAAI,CAAC,KAAK,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC;QAEtC,sCAAsC;QACtC,IAAI,MAAM,CAAC,QAAQ,GAAG,OAAO,CAAC,MAAM,EAAE,CAAC;YACnC,MAAM,IAAI,KAAK,CAAC,mCAAmC,OAAO,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,KAAK,iBAAiB,MAAM,CAAC,QAAQ,oEAAoE,CAAC,CAAC;QAC3L,CAAC;QAED,OAAO,MAAM,CAAC,KAAK,CAAC;IACxB,CAAC;IAEM,MAAM,CAAC,KAAK,CAAC,OAAiB,EAAE,KAAa;QAChD,IAAI,GAAG,GAAG,KAAK,CAAC;QAEhB,IAAI,OAAO,CAAC,GAAG,CAAC,CAAC,KAAK,KAAK,MAAM,EAAE,CAAC;YAChC,MAAM,IAAI,KAAK,CAAC,4BAA4B,GAAG,wCAAwC,OAAO,CAAC,GAAG,CAAC,CAAC,KAAK,uDAAuD,CAAC,CAAC;QACtK,CAAC;QACD,GAAG,EAAE,CAAC;QAEN,IAAI,GAAG,IAAI,OAAO,CAAC,MAAM,EAAE,CAAC;YACxB,MAAM,IAAI,KAAK,CAAC,0HAA0H,CAAC,CAAC;QAChJ,CAAC;QAED,IAAI,OAAO,CAAC,GAAG,CAAC,CAAC,IAAI,KAAK,kBAAS,CAAC,UAAU,EAAE,CAAC;YAC7C,qBAAqB;YACrB,MAAM,IAAI,GAAG,OAAO,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC;YAChC,GAAG,EAAE,CAAC;YACN,OAAO,EAAE,KAAK,EAAE,IAAI,iCAAgB,CAAC,IAAI,CAAC,EAAE,QAAQ,EAAE,GAAG,EAAE,CAAC;QAChE,CAAC;QAED,IAAI,OAAO,CAAC,GAAG,CAAC,CAAC,IAAI,KAAK,kBAAS,CAAC,SAAS,EAAE,CAAC;YAC5C,qDAAqD;YACrD,MAAM,MAAM,GAAG,+CAAsB,CAAC,KAAK,CAAC,OAAO,EAAE,GAAG,CAAC,CAAC;YAC1D,OAAO,MAAM,CAAC;QAClB,CAAC;QAED,MAAM,IAAI,KAAK,CAAC,4BAA4B,GAAG,sFAAsF,OAAO,CAAC,GAAG,CAAC,CAAC,KAAK,IAAI,CAAC,CAAC;IACjK,CAAC;CACJ;AA3CD,oDA2CC"}
package/dist/parsers/ParameterExpressionParser.js
@@ -0,0 +1,15 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.ParameterExpressionParser = void 0;
const ValueComponent_1 = require("../models/ValueComponent");
class ParameterExpressionParser {
    static parse(lexemes, index) {
        let idx = index;
        // Exclude the parameter symbol (first character)
        const value = new ValueComponent_1.ParameterExpression(lexemes[idx].value.slice(1));
        idx++;
        return { value, newIndex: idx };
    }
}
exports.ParameterExpressionParser = ParameterExpressionParser;
//# sourceMappingURL=ParameterExpressionParser.js.map
package/dist/parsers/ParameterExpressionParser.js.map
@@ -0,0 +1 @@
{"version":3,"file":"ParameterExpressionParser.js","sourceRoot":"","sources":["../../src/parsers/ParameterExpressionParser.ts"],"names":[],"mappings":";;;AACA,6DAA+E;AAE/E,MAAa,yBAAyB;IAC3B,MAAM,CAAC,KAAK,CAAC,OAAiB,EAAE,KAAa;QAChD,IAAI,GAAG,GAAG,KAAK,CAAC;QAChB,iDAAiD;QACjD,MAAM,KAAK,GAAG,IAAI,oCAAmB,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;QACnE,GAAG,EAAE,CAAC;QACN,OAAO,EAAE,KAAK,EAAE,QAAQ,EAAE,GAAG,EAAE,CAAC;IACpC,CAAC;CACJ;AARD,8DAQC"}
package/dist/parsers/ParenExpressionParser.js
@@ -0,0 +1,33 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.ParenExpressionParser = void 0;
const Lexeme_1 = require("../models/Lexeme");
const ValueComponent_1 = require("../models/ValueComponent");
const SelectQueryParser_1 = require("./SelectQueryParser");
const ValueParser_1 = require("./ValueParser");
class ParenExpressionParser {
    static parse(lexemes, index) {
        let idx = index;
        // check inline query
        if (idx + 1 < lexemes.length && lexemes[idx].type === Lexeme_1.TokenType.OpenParen && (lexemes[idx + 1].value === "select" || lexemes[idx + 1].value === "values" || lexemes[idx + 1].value === "with")) {
            idx += 1; // Skip the '(' token
            const result = SelectQueryParser_1.SelectQueryParser.parse(lexemes, idx);
            idx = result.newIndex;
            // Check for closing parenthesis
            if (idx >= lexemes.length || lexemes[idx].type !== Lexeme_1.TokenType.CloseParen) {
                throw new Error(`Expected ')' at index ${idx}, but found ${lexemes[idx].value}`);
            }
            idx++; // Skip the ')' token
            const value = new ValueComponent_1.InlineQuery(result.value);
            return { value, newIndex: idx };
        }
        else {
            const result = ValueParser_1.ValueParser.parseArgument(Lexeme_1.TokenType.OpenParen, Lexeme_1.TokenType.CloseParen, lexemes, index);
            idx = result.newIndex;
            const value = new ValueComponent_1.ParenExpression(result.value);
            return { value, newIndex: idx };
        }
    }
}
exports.ParenExpressionParser = ParenExpressionParser;
//# sourceMappingURL=ParenExpressionParser.js.map
package/dist/parsers/ParenExpressionParser.js.map
@@ -0,0 +1 @@
{"version":3,"file":"ParenExpressionParser.js","sourceRoot":"","sources":["../../src/parsers/ParenExpressionParser.ts"],"names":[],"mappings":";;;AAAA,6CAAqD;AACrD,6DAAwF;AACxF,2DAAwD;AACxD,+CAA4C;AAE5C,MAAa,qBAAqB;IACvB,MAAM,CAAC,KAAK,CAAC,OAAiB,EAAE,KAAa;QAChD,IAAI,GAAG,GAAG,KAAK,CAAC;QAEhB,qBAAqB;QACrB,IAAI,GAAG,GAAG,CAAC,GAAG,OAAO,CAAC,MAAM,IAAI,OAAO,CAAC,GAAG,CAAC,CAAC,IAAI,KAAK,kBAAS,CAAC,SAAS,IAAI,CACzE,OAAO,CAAC,GAAG,GAAG,CAAC,CAAC,CAAC,KAAK,KAAK,QAAQ,IAAI,OAAO,CAAC,GAAG,GAAG,CAAC,CAAC,CAAC,KAAK,KAAK,QAAQ,IAAI,OAAO,CAAC,GAAG,GAAG,CAAC,CAAC,CAAC,KAAK,KAAK,MAAM,CAClH,EAAE,CAAC;YACA,GAAG,IAAI,CAAC,CAAC,CAAC,qBAAqB;YAC/B,MAAM,MAAM,GAAG,qCAAiB,CAAC,KAAK,CAAC,OAAO,EAAE,GAAG,CAAC,CAAC;YACrD,GAAG,GAAG,MAAM,CAAC,QAAQ,CAAC;YAEtB,gCAAgC;YAChC,IAAI,GAAG,IAAI,OAAO,CAAC,MAAM,IAAI,OAAO,CAAC,GAAG,CAAC,CAAC,IAAI,KAAK,kBAAS,CAAC,UAAU,EAAE,CAAC;gBACtE,MAAM,IAAI,KAAK,CAAC,yBAAyB,GAAG,eAAe,OAAO,CAAC,GAAG,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC;YACrF,CAAC;YACD,GAAG,EAAE,CAAC,CAAC,qBAAqB;YAE5B,MAAM,KAAK,GAAG,IAAI,4BAAW,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;YAC5C,OAAO,EAAE,KAAK,EAAE,QAAQ,EAAE,GAAG,EAAE,CAAC;QACpC,CAAC;aAAM,CAAC;YACJ,MAAM,MAAM,GAAG,yBAAW,CAAC,aAAa,CAAC,kBAAS,CAAC,SAAS,EAAE,kBAAS,CAAC,UAAU,EAAE,OAAO,EAAE,KAAK,CAAC,CAAC;YACpG,GAAG,GAAG,MAAM,CAAC,QAAQ,CAAC;YAEtB,MAAM,KAAK,GAAG,IAAI,gCAAe,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;YAChD,OAAO,EAAE,KAAK,EAAE,QAAQ,EAAE,GAAG,EAAE,CAAC;QACpC,CAAC;IACL,CAAC;CACJ;AA5BD,sDA4BC"}
package/dist/parsers/PartitionByParser.js
@@ -0,0 +1,51 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.PartitionByParser = void 0;
const Clause_1 = require("../models/Clause");
const Lexeme_1 = require("../models/Lexeme");
const ValueComponent_1 = require("../models/ValueComponent");
const SqlTokenizer_1 = require("./SqlTokenizer");
const ValueParser_1 = require("./ValueParser");
class PartitionByParser {
    static parseFromText(query) {
        const tokenizer = new SqlTokenizer_1.SqlTokenizer(query); // Initialize tokenizer
        const lexemes = tokenizer.readLexmes(); // Get tokens
        // Parse
        const result = this.parse(lexemes, 0);
        // Error if there are remaining tokens
        if (result.newIndex < lexemes.length) {
            throw new Error(`Syntax error: Unexpected token "${lexemes[result.newIndex].value}" at position ${result.newIndex}. The PARTITION BY clause is complete but there are additional tokens.`);
        }
        return result.value;
    }
    static parse(lexemes, index) {
        let idx = index;
        if (lexemes[idx].value !== 'partition by') {
            throw new Error(`Syntax error at position ${idx}: Expected 'PARTITION BY' keyword but found "${lexemes[idx].value}". PARTITION BY clauses must start with the PARTITION BY keywords.`);
        }
        idx++;
        const items = [];
        const item = ValueParser_1.ValueParser.parse(lexemes, idx);
        items.push(item.value);
        idx = item.newIndex;
        while (idx < lexemes.length && lexemes[idx].type === Lexeme_1.TokenType.Comma) {
            idx++;
            const item = ValueParser_1.ValueParser.parse(lexemes, idx);
            items.push(item.value);
            idx = item.newIndex;
        }
        if (items.length === 0) {
            throw new Error(`Syntax error at position ${index}: No partition expressions found. The PARTITION BY clause requires at least one expression to partition by.`);
        }
        else if (items.length === 1) {
            const clause = new Clause_1.PartitionByClause(items[0]);
            return { value: clause, newIndex: idx };
        }
        else {
            const clause = new Clause_1.PartitionByClause(new ValueComponent_1.ValueList(items));
            return { value: clause, newIndex: idx };
        }
    }
}
exports.PartitionByParser = PartitionByParser;
//# sourceMappingURL=PartitionByParser.js.map
package/dist/parsers/PartitionByParser.js.map
@@ -0,0 +1 @@
{"version":3,"file":"PartitionByParser.js","sourceRoot":"","sources":["../../src/parsers/PartitionByParser.ts"],"names":[],"mappings":";;;AAAA,6CAAqD;AACrD,6CAAqD;AACrD,6DAAqE;AACrE,iDAA8C;AAC9C,+CAA4C;AAE5C,MAAa,iBAAiB;IACnB,MAAM,CAAC,aAAa,CAAC,KAAa;QACrC,MAAM,SAAS,GAAG,IAAI,2BAAY,CAAC,KAAK,CAAC,CAAC,CAAC,uBAAuB;QAClE,MAAM,OAAO,GAAG,SAAS,CAAC,UAAU,EAAE,CAAC,CAAC,aAAa;QAErD,QAAQ;QACR,MAAM,MAAM,GAAG,IAAI,CAAC,KAAK,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC;QAEtC,sCAAsC;QACtC,IAAI,MAAM,CAAC,QAAQ,GAAG,OAAO,CAAC,MAAM,EAAE,CAAC;YACnC,MAAM,IAAI,KAAK,CAAC,mCAAmC,OAAO,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,KAAK,iBAAiB,MAAM,CAAC,QAAQ,wEAAwE,CAAC,CAAC;QAC/L,CAAC;QAED,OAAO,MAAM,CAAC,KAAK,CAAC;IACxB,CAAC;IAEM,MAAM,CAAC,KAAK,CAAC,OAAiB,EAAE,KAAa;QAChD,IAAI,GAAG,GAAG,KAAK,CAAC;QAEhB,IAAI,OAAO,CAAC,GAAG,CAAC,CAAC,KAAK,KAAK,cAAc,EAAE,CAAC;YACxC,MAAM,IAAI,KAAK,CAAC,4BAA4B,GAAG,gDAAgD,OAAO,CAAC,GAAG,CAAC,CAAC,KAAK,oEAAoE,CAAC,CAAC;QAC3L,CAAC;QACD,GAAG,EAAE,CAAC;QAEN,MAAM,KAAK,GAAqB,EAAE,CAAC;QACnC,MAAM,IAAI,GAAG,yBAAW,CAAC,KAAK,CAAC,OAAO,EAAE,GAAG,CAAC,CAAC;QAC7C,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;QACvB,GAAG,GAAG,IAAI,CAAC,QAAQ,CAAC;QAEpB,OAAO,GAAG,GAAG,OAAO,CAAC,MAAM,IAAI,OAAO,CAAC,GAAG,CAAC,CAAC,IAAI,KAAK,kBAAS,CAAC,KAAK,EAAE,CAAC;YACnE,GAAG,EAAE,CAAC;YACN,MAAM,IAAI,GAAG,yBAAW,CAAC,KAAK,CAAC,OAAO,EAAE,GAAG,CAAC,CAAC;YAC7C,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;YACvB,GAAG,GAAG,IAAI,CAAC,QAAQ,CAAC;QACxB,CAAC;QAED,IAAI,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;YACrB,MAAM,IAAI,KAAK,CAAC,4BAA4B,KAAK,6GAA6G,CAAC,CAAC;QACpK,CAAC;aAAM,IAAI,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;YAC5B,MAAM,MAAM,GAAG,IAAI,0BAAiB,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;YAC/C,OAAO,EAAE,KAAK,EAAE,MAAM,EAAE,QAAQ,EAAE,GAAG,EAAE,CAAC;QAC5C,CAAC;aAAM,CAAC;YACJ,MAAM,MAAM,GAAG,IAAI,0BAAiB,CAAC,IAAI,0BAAS,CAAC,KAAK,CAAC,CAAC,CAAC;YAC3D,OAAO,EAAE,KAAK,EAAE,MAAM,EAAE,QAAQ,EAAE,GAAG,EAAE,CAAC;QAC5C,CAAC;IACL,CAAC;CACJ;AA9CD,8CA8CC"}