@forge/sql 2.2.3 → 2.3.0-next.1
This diff shows the changes between these publicly released package versions as they appear in their public registries, and is provided for informational purposes only.
- package/out/__test__/migration.test.js +92 -11
- package/out/__test__/sql.test.js +74 -42
- package/out/errors.d.ts +4 -2
- package/out/errors.d.ts.map +1 -1
- package/out/errors.js +12 -2
- package/out/migration.d.ts.map +1 -1
- package/out/migration.js +5 -5
- package/out/sql.d.ts +11 -3
- package/out/sql.d.ts.map +1 -1
- package/out/sql.js +18 -6
- package/out/utils/error-handling.d.ts.map +1 -1
- package/package.json +1 -1
package/out/__test__/migration.test.js
CHANGED

@@ -5,6 +5,7 @@ const sql_1 = require("../sql");
 const errorCodes_1 = require("../errorCodes");
 const jest_when_1 = require("jest-when");
 const sql_statement_1 = require("../sql-statement");
+const errors_1 = require("../errors");
 jest.mock('../sql');
 describe('Migration', () => {
 let migrationRunner;
@@ -38,25 +39,25 @@ describe('Migration', () => {
 describe('list', () => {
 it('should list all previously migrations', async () => {
 const migratedAtDate = new Date();
-mockSqlClient.
+mockSqlClient.executeDDL.mockResolvedValueOnce({
 rows: [{ id: 1, name: 'create-table-test', migratedAt: migratedAtDate }]
 });
 const migrations = await migrationRunner.list();
 expect(migrations.length).toBe(1);
 expect(migrations[0]).toEqual({ id: 1, name: 'create-table-test', migratedAt: migratedAtDate });
-expect(mockSqlClient.
+expect(mockSqlClient.executeDDL).toHaveBeenCalledWith('SELECT id, name, migratedAt FROM __migrations;');
 });
 it('should be empty when no previous migrations exist', async () => {
-mockSqlClient.
+mockSqlClient.executeDDL.mockResolvedValueOnce({
 rows: []
 });
 const migrations = await migrationRunner.list();
 expect(migrations.length).toBe(0);
-expect(mockSqlClient.
+expect(mockSqlClient.executeDDL).toHaveBeenCalledWith('SELECT id, name, migratedAt FROM __migrations;');
 });
 it('should raise an error when schema version table does not exist', async () => {
 const tableDoesNotExistError = new Error(errorCodes_1.errorCodes.SQL_EXECUTION_ERROR);
-mockSqlClient.
+mockSqlClient.executeDDL.mockRejectedValue(tableDoesNotExistError);
 await expect(migrationRunner.list()).rejects.toThrow(tableDoesNotExistError);
 });
 });
@@ -72,10 +73,10 @@ describe('Migration', () => {
 it('should execute migrations that have not been previously run', async () => {
 migrationRunner.enqueue('v_001_create_table_foo', CREATE_TABLE_FOO_QUERY);
 migrationRunner.enqueue('v_002_create_table_bar', CREATE_TABLE_BAR_QUERY);
-(0, jest_when_1.when)(mockSqlClient.
+(0, jest_when_1.when)(mockSqlClient.executeDDL)
 .calledWith((0, jest_when_1.when)((arg) => arg.startsWith('CREATE TABLE IF NOT EXISTS __migrations')))
 .mockResolvedValue({ rows: [] });
-(0, jest_when_1.when)(mockSqlClient.
+(0, jest_when_1.when)(mockSqlClient.executeDDL)
 .calledWith('SELECT id, name, migratedAt FROM __migrations;')
 .mockResolvedValue({
 rows: [{ name: 'v_001_create_table_foo', statement: CREATE_TABLE_FOO_QUERY, migratedAt: new Date() }]
@@ -90,7 +91,87 @@ describe('Migration', () => {
 const result = await migrationRunner.run();
 expect(result).toEqual(['v_002_create_table_bar']);
 expect(mockSqlClient.prepare).toHaveBeenCalledTimes(1);
-expect(mockSqlClient.
+expect(mockSqlClient.executeDDL).toHaveBeenCalledTimes(3);
+});
+it('should throw forge sql api error during check point', async () => {
+migrationRunner.enqueue('v_001_create_table_foo', CREATE_TABLE_FOO_QUERY);
+migrationRunner.enqueue('v_002_create_table_bar', CREATE_TABLE_BAR_QUERY);
+(0, jest_when_1.when)(mockSqlClient.executeDDL)
+.calledWith((0, jest_when_1.when)((arg) => arg.startsWith('CREATE TABLE IF NOT EXISTS __migrations')))
+.mockResolvedValue({ rows: [] });
+(0, jest_when_1.when)(mockSqlClient.executeDDL)
+.calledWith('SELECT id, name, migratedAt FROM __migrations;')
+.mockResolvedValue({
+rows: [{ name: 'v_001_create_table_foo', statement: CREATE_TABLE_FOO_QUERY, migratedAt: new Date() }]
+});
+(0, jest_when_1.when)(mockSqlClient.executeDDL)
+.calledWith(CREATE_TABLE_BAR_QUERY)
+.mockRejectedValue(new errors_1.ForgeSQLAPIError({ status: 400, statusText: 'Invalid query' }, { code: 'SQL_EXECUTION_ERROR', message: '', suggestion: 'Check your SQL query' }));
+await expect(migrationRunner.run()).rejects.toMatchObject({
+migrationName: 'v_002_create_table_bar',
+migrationsYetToRun: ['v_002_create_table_bar'],
+cause: new errors_1.ForgeSQLAPIError({ status: 400, statusText: 'Invalid query' }, { code: 'SQL_EXECUTION_ERROR', message: '', suggestion: 'Check your SQL query' })
+});
+});
+it('should throw an error when a migration fails', async () => {
+migrationRunner.enqueue('v_001_create_table_foo', CREATE_TABLE_FOO_QUERY);
+migrationRunner.enqueue('v_002_create_table_bar', CREATE_TABLE_BAR_QUERY);
+(0, jest_when_1.when)(mockSqlClient.executeDDL)
+.calledWith((0, jest_when_1.when)((arg) => arg.startsWith('CREATE TABLE IF NOT EXISTS __migrations')))
+.mockResolvedValue({ rows: [] });
+(0, jest_when_1.when)(mockSqlClient.executeDDL)
+.calledWith('SELECT id, name, migratedAt FROM __migrations;')
+.mockResolvedValue({
+rows: [{ name: 'v_001_create_table_foo', statement: CREATE_TABLE_FOO_QUERY, migratedAt: new Date() }]
+});
+(0, jest_when_1.when)(mockSqlClient.executeDDL)
+.calledWith(CREATE_TABLE_BAR_QUERY)
+.mockRejectedValue(new errors_1.ForgeSQLAPIError({ status: 400, statusText: 'Invalid query' }, { code: 'SQL_EXECUTION_ERROR', message: '', suggestion: 'Check your SQL query' }));
+await expect(migrationRunner.run()).rejects.toMatchObject({
+migrationName: 'v_002_create_table_bar',
+migrationsYetToRun: ['v_002_create_table_bar'],
+cause: new errors_1.ForgeSQLAPIError({ status: 400, statusText: 'Invalid query' }, { code: 'SQL_EXECUTION_ERROR', message: '', suggestion: 'Check your SQL query' })
+});
+});
+it('should throw generic error when a migration fails', async () => {
+migrationRunner.enqueue('v_001_create_table_foo', CREATE_TABLE_FOO_QUERY);
+migrationRunner.enqueue('v_002_create_table_bar', CREATE_TABLE_BAR_QUERY);
+(0, jest_when_1.when)(mockSqlClient.executeDDL)
+.calledWith((0, jest_when_1.when)((arg) => arg.startsWith('CREATE TABLE IF NOT EXISTS __migrations')))
+.mockResolvedValue({ rows: [] });
+(0, jest_when_1.when)(mockSqlClient.executeDDL)
+.calledWith('SELECT id, name, migratedAt FROM __migrations;')
+.mockResolvedValue({
+rows: [{ name: 'v_001_create_table_foo', statement: CREATE_TABLE_FOO_QUERY, migratedAt: new Date() }]
+});
+(0, jest_when_1.when)(mockSqlClient.executeDDL)
+.calledWith(CREATE_TABLE_BAR_QUERY)
+.mockRejectedValue(new Error('Failed to execute migration'));
+await expect(migrationRunner.run()).rejects.toMatchObject({
+migrationName: 'v_002_create_table_bar',
+migrationsYetToRun: ['v_002_create_table_bar'],
+cause: new Error('Failed to execute migration')
+});
+});
+it('should throw generic error when check point fails', async () => {
+migrationRunner.enqueue('v_001_create_table_foo', CREATE_TABLE_FOO_QUERY);
+migrationRunner.enqueue('v_002_create_table_bar', CREATE_TABLE_BAR_QUERY);
+(0, jest_when_1.when)(mockSqlClient.executeDDL)
+.calledWith((0, jest_when_1.when)((arg) => arg.startsWith('CREATE TABLE IF NOT EXISTS __migrations')))
+.mockResolvedValue({ rows: [] });
+(0, jest_when_1.when)(mockSqlClient.executeDDL)
+.calledWith('SELECT id, name, migratedAt FROM __migrations;')
+.mockResolvedValue({
+rows: [{ name: 'v_001_create_table_foo', statement: CREATE_TABLE_FOO_QUERY, migratedAt: new Date() }]
+});
+(0, jest_when_1.when)(mockSqlClient.executeDDL)
+.calledWith(CREATE_TABLE_BAR_QUERY)
+.mockRejectedValue(new Error('Failed to execute migration'));
+await expect(migrationRunner.run()).rejects.toMatchObject({
+migrationName: 'v_002_create_table_bar',
+migrationsYetToRun: ['v_002_create_table_bar'],
+cause: new Error('Failed to execute migration')
+});
 });
 });
 describe('when no migrations have been run in the past', () => {
@@ -98,10 +179,10 @@ describe('Migration', () => {
 const CREATE_TABLE_FOO_QUERY = 'CREATE TABLE IF NOT EXISTS foo (id INT)';
 migrationRunner.enqueue('v_001_create_table_foo', CREATE_TABLE_FOO_QUERY);
 migrationRunner.enqueue('v_002_create_table_bar', 'CREATE TABLE IF NOT EXISTS bar (id INT)');
-(0, jest_when_1.when)(mockSqlClient.
+(0, jest_when_1.when)(mockSqlClient.executeDDL).calledWith('SELECT id, name, migratedAt FROM __migrations;').mockResolvedValue({
 rows: []
 });
-(0, jest_when_1.when)(mockSqlClient.
+(0, jest_when_1.when)(mockSqlClient.executeDDL)
 .calledWith((0, jest_when_1.when)((arg) => arg.startsWith('CREATE TABLE IF NOT EXISTS __migrations')))
 .mockResolvedValue({ rows: [] });
 const mockApiCall = jest.fn();
@@ -114,7 +195,7 @@ describe('Migration', () => {
 const result = await migrationRunner.run();
 expect(result).toEqual(['v_001_create_table_foo', 'v_002_create_table_bar']);
 expect(mockSqlClient.prepare).toHaveBeenCalledTimes(2);
-expect(mockSqlClient.
+expect(mockSqlClient.executeDDL).toHaveBeenCalledTimes(4);
 });
 });
 });

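For orientation, the migration tests above drive the public MigrationRunner through the new executeDDL client method. A minimal consumer-side sketch of the same surface, assuming an app imports migrationRunner from the package root as in the Forge docs; the table names and queries are illustrative:

```ts
import { migrationRunner } from '@forge/sql';

// Queue DDL migrations once per deployment; names must be unique and are
// recorded in the __migrations bookkeeping table seen in the tests above.
migrationRunner.enqueue('v_001_create_table_foo', 'CREATE TABLE IF NOT EXISTS foo (id INT)');
migrationRunner.enqueue('v_002_create_table_bar', 'CREATE TABLE IF NOT EXISTS bar (id INT)');

export async function applyMigrations(): Promise<void> {
  // run() resolves with the names of the migrations applied in this invocation.
  const applied = await migrationRunner.run();
  console.log('Applied migrations:', applied);

  // list() resolves with previously applied migrations ({ id, name, migratedAt }).
  const history = await migrationRunner.list();
  console.log('Migration history:', history);
}
```
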
package/out/__test__/sql.test.js
CHANGED
@@ -21,11 +21,61 @@ describe('SqlClient', () => {
 mockFetch.mockResolvedValue(response);
 return body;
 }
+describe('storageApi', () => {
+it('should send a request and return the response body', async () => {
+mockFetchExecute([]);
+const result = await sqlClient['storageApi']('SELECT * FROM test', [], undefined, sql_1.SQL_API_ENDPOINTS.EXECUTE);
+expect(mockFetch).toHaveBeenCalledWith('/api/v1/execute', {
+method: 'POST',
+body: JSON.stringify({ query: 'SELECT * FROM test', params: [], method: 'all' }),
+redirect: 'follow',
+headers: { 'Content-Type': 'application/json' }
+});
+expect(result).toEqual({ rows: [] });
+});
+it('should send a request with parameters and method', async () => {
+mockFetchExecute([]);
+const params = [1];
+const result = await sqlClient['storageApi']('SELECT * FROM test WHERE id = ?', params, 'one', sql_1.SQL_API_ENDPOINTS.EXECUTE);
+expect(mockFetch).toHaveBeenCalledWith('/api/v1/execute', {
+method: 'POST',
+body: JSON.stringify({ query: 'SELECT * FROM test WHERE id = ?', params, method: 'one' }),
+redirect: 'follow',
+headers: { 'Content-Type': 'application/json' }
+});
+expect(result).toEqual({ rows: [] });
+});
+it('should send a request to DDL endpoint with method', async () => {
+mockFetchExecute([]);
+const result = await sqlClient['storageApi']('CREATE TABLE test (id INT)', [], 'one', sql_1.SQL_API_ENDPOINTS.EXECUTE_DDL);
+expect(mockFetch).toHaveBeenCalledWith('/api/v1/execute/ddl', {
+method: 'POST',
+body: JSON.stringify({ query: 'CREATE TABLE test (id INT)', params: [], method: 'one' }),
+redirect: 'follow',
+headers: { 'Content-Type': 'application/json' }
+});
+expect(result).toEqual({ rows: [] });
+});
+it('should handle invalid JSON body', async () => {
+const responseText = 'Invalid JSON';
+const response = new node_fetch_1.Response(responseText, { status: 200 });
+mockFetch.mockResolvedValue(response);
+await expect(sqlClient['storageApi']('SELECT * from strange;', [], 'one', sql_1.SQL_API_ENDPOINTS.EXECUTE)).rejects.toThrow(`Unexpected error. Response was not valid JSON: ${responseText}`);
+});
+it('should throw ForgeSQLAPIError on API error', async () => {
+const forgeError = { code: 'INVALID_QUERY', message: 'Invalid SQL query' };
+const mockResponse = new node_fetch_1.Response(JSON.stringify(forgeError), {
+status: 400
+});
+mockFetch.mockResolvedValue(mockResponse);
+await expect(sqlClient['storageApi']('INVALID SQL QUERY', [], undefined, sql_1.SQL_API_ENDPOINTS.EXECUTE)).rejects.toThrow(new errors_1.ForgeSQLAPIError({ status: mockResponse.status, statusText: mockResponse.statusText }, forgeError));
+});
+});
 describe('sendRequest', () => {
 it('should send a request with the correct options and return the response', async () => {
 const mockResponse = { ok: true, status: 200 };
 mockFetch.mockResolvedValue(mockResponse);
-const path = 'api/v1/execute';
+const path = '/api/v1/execute';
 const options = { method: 'GET', headers: { 'Custom-Header': 'value' } };
 const response = await sqlClient['sendRequest'](path, options);
 expect(mockFetch).toHaveBeenCalledWith(path, {
@@ -41,7 +91,7 @@ describe('SqlClient', () => {
 it('should handle requests without options', async () => {
 const mockResponse = { ok: true, status: 200 };
 mockFetch.mockResolvedValue(mockResponse);
-const path = 'api/v1/execute';
+const path = '/api/v1/execute';
 const response = await sqlClient['sendRequest'](path);
 expect(mockFetch).toHaveBeenCalledWith(path, {
 redirect: 'follow',
@@ -54,50 +104,11 @@ describe('SqlClient', () => {
 it('should propagate fetch errors', async () => {
 const mockError = new Error('Network error');
 mockFetch.mockRejectedValue(mockError);
-const path = 'api/v1/execute';
+const path = '/api/v1/execute';
 await expect(sqlClient['sendRequest'](path)).rejects.toThrow(mockError);
 expect(mockFetch).toHaveBeenCalledWith(path, expect.any(Object));
 });
 });
-describe('storageApi', () => {
-it('should send a request and return the response body', async () => {
-mockFetchExecute([]);
-const result = await sqlClient.storageApi('SELECT * FROM test');
-expect(mockFetch).toHaveBeenCalledWith('api/v1/execute', {
-method: 'POST',
-body: JSON.stringify({ query: 'SELECT * FROM test', params: [], method: 'all' }),
-redirect: 'follow',
-headers: { 'Content-Type': 'application/json' }
-});
-expect(result).toEqual({ rows: [] });
-});
-it('should send a request with parameters and method', async () => {
-mockFetchExecute([]);
-const params = [1];
-const result = await sqlClient.storageApi('SELECT * FROM test WHERE id = ?', params, 'one');
-expect(mockFetch).toHaveBeenCalledWith('api/v1/execute', {
-method: 'POST',
-body: JSON.stringify({ query: 'SELECT * FROM test WHERE id = ?', params, method: 'one' }),
-redirect: 'follow',
-headers: { 'Content-Type': 'application/json' }
-});
-expect(result).toEqual({ rows: [] });
-});
-it('should handle invalid JSON body', async () => {
-const responseText = 'Invalid JSON';
-const response = new node_fetch_1.Response(responseText, { status: 200 });
-mockFetch.mockResolvedValue(response);
-await expect(sqlClient.storageApi('SELECT * from strange;')).rejects.toThrow(`Unexpected error. Response was not valid JSON: ${responseText}`);
-});
-it('should throw ForgeSQLAPIError on API error', async () => {
-const forgeError = { code: 'INVALID_QUERY', message: 'Invalid SQL query' };
-const mockResponse = new node_fetch_1.Response(JSON.stringify(forgeError), {
-status: 400
-});
-mockFetch.mockResolvedValue(mockResponse);
-await expect(sqlClient.storageApi('INVALID SQL QUERY')).rejects.toThrow(new errors_1.ForgeSQLAPIError({ status: mockResponse.status, statusText: mockResponse.statusText }, forgeError));
-});
-});
 describe('prepare', () => {
 it('should return a SqlStatement instance with query', () => {
 const statement = sqlClient.prepare('INSERT INTO test VALUES (?, ?)');
@@ -156,4 +167,25 @@ describe('SqlClient', () => {
 await expect(sqlClient._provision()).rejects.toThrow('Unexpected error in provision request');
 });
 });
+describe('executeDDL', () => {
+it('should return a valid Result object when DDL query gexecuted successfully', async () => {
+const mockResponse = {
+ok: true,
+status: 200,
+text: jest.fn().mockResolvedValue(JSON.stringify({ success: true }))
+};
+mockFetch.mockResolvedValue(mockResponse);
+const result = await sqlClient.executeDDL('CREATE TABLE test (id INT)');
+expect(result).toEqual({ success: true });
+expect(mockFetch).toHaveBeenCalledWith('/api/v1/execute/ddl', expect.objectContaining({ method: 'POST' }));
+});
+it('should throw ForgeSQLError when response is not valid JSON', async () => {
+const forgeError = { code: 'INVALID_QUERY', message: 'Invalid SQL query' };
+const mockResponse = new node_fetch_1.Response(JSON.stringify(forgeError), {
+status: 400
+});
+mockFetch.mockResolvedValue(mockResponse);
+await expect(sqlClient.executeDDL('INVALID SQL QUERY')).rejects.toThrow(new errors_1.ForgeSQLAPIError(mockResponse, forgeError));
+});
+});
 });

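App code normally reaches this client through the exported sql instance, so a consumer test can stub the new method directly instead of mocking fetch as the package tests above do. A minimal Jest sketch, assuming TypeScript with @types/jest; the createWidgetsTable function and its query are hypothetical:

```ts
import { sql } from '@forge/sql';

// Replace the real client with jest mocks for the methods exercised below.
jest.mock('@forge/sql', () => ({
  sql: { executeDDL: jest.fn(), prepare: jest.fn() },
}));

// Hypothetical app code under test: creates a table through the new DDL method.
async function createWidgetsTable() {
  return sql.executeDDL('CREATE TABLE IF NOT EXISTS widgets (id INT)');
}

it('routes table creation through executeDDL', async () => {
  (sql.executeDDL as jest.Mock).mockResolvedValue({ rows: [] });
  await createWidgetsTable();
  expect(sql.executeDDL).toHaveBeenCalledWith('CREATE TABLE IF NOT EXISTS widgets (id INT)');
});
```
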
package/out/errors.d.ts
CHANGED
@@ -23,11 +23,13 @@ export declare class ForgeSQLAPIError extends ForgeSQLError {
 export declare class MigrationExecutionError extends ForgeSQLError {
 readonly migrationName: string;
 readonly migrationsYetToRun: string[];
-
+readonly cause?: Error | undefined;
+constructor(migrationName: string, migrationsYetToRun: string[], cause?: Error | undefined);
 }
 export declare class MigrationCheckPointError extends ForgeSQLError {
 readonly migrationName: string;
 readonly migrationsYetToRun: string[];
-
+readonly cause?: Error | undefined;
+constructor(migrationName: string, migrationsYetToRun: string[], cause?: Error | undefined);
 }
 //# sourceMappingURL=errors.d.ts.map

package/out/errors.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"errors.d.ts","sourceRoot":"","sources":["../src/errors.ts"],"names":[],"mappings":"AACA,MAAM,WAAW,UAAU;IAKzB,IAAI,EAAE,MAAM,CAAC;IAGb,OAAO,EAAE,MAAM,CAAC;IAGhB,UAAU,CAAC,EAAE,MAAM,CAAC;IAGpB,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;CAGnC;AAGD,qBAAa,aAAc,SAAQ,KAAK;gBAC1B,OAAO,EAAE,MAAM;CAI5B;AASD,MAAM,WAAW,uBAAuB;IAEtC,MAAM,EAAE,MAAM,CAAC;IAGf,UAAU,EAAE,MAAM,CAAC;IAGnB,OAAO,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;CACzB;AAMD,qBAAa,gBAAiB,SAAQ,aAAa;IAMjD,eAAe,EAAE,uBAAuB,CAAC;IAMzC,IAAI,EAAE,MAAM,CAAC;IAGb,OAAO,EAAE,MAAM,CAAC;IAGhB,UAAU,CAAC,EAAE,MAAM,CAAC;IAGpB,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;gBAEtB,eAAe,EAAE,uBAAuB,EAAE,UAAU,EAAE,UAAU;CAgB7E;AAED,qBAAa,uBAAwB,SAAQ,aAAa;IAEtD,QAAQ,CAAC,aAAa,EAAE,MAAM;IAC9B,QAAQ,CAAC,kBAAkB,EAAE,MAAM,EAAE;
+{"version":3,"file":"errors.d.ts","sourceRoot":"","sources":["../src/errors.ts"],"names":[],"mappings":"AACA,MAAM,WAAW,UAAU;IAKzB,IAAI,EAAE,MAAM,CAAC;IAGb,OAAO,EAAE,MAAM,CAAC;IAGhB,UAAU,CAAC,EAAE,MAAM,CAAC;IAGpB,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;CAGnC;AAGD,qBAAa,aAAc,SAAQ,KAAK;gBAC1B,OAAO,EAAE,MAAM;CAI5B;AASD,MAAM,WAAW,uBAAuB;IAEtC,MAAM,EAAE,MAAM,CAAC;IAGf,UAAU,EAAE,MAAM,CAAC;IAGnB,OAAO,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;CACzB;AAMD,qBAAa,gBAAiB,SAAQ,aAAa;IAMjD,eAAe,EAAE,uBAAuB,CAAC;IAMzC,IAAI,EAAE,MAAM,CAAC;IAGb,OAAO,EAAE,MAAM,CAAC;IAGhB,UAAU,CAAC,EAAE,MAAM,CAAC;IAGpB,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;gBAEtB,eAAe,EAAE,uBAAuB,EAAE,UAAU,EAAE,UAAU;CAgB7E;AAED,qBAAa,uBAAwB,SAAQ,aAAa;IAEtD,QAAQ,CAAC,aAAa,EAAE,MAAM;IAC9B,QAAQ,CAAC,kBAAkB,EAAE,MAAM,EAAE;IACrC,QAAQ,CAAC,KAAK,CAAC;gBAFN,aAAa,EAAE,MAAM,EACrB,kBAAkB,EAAE,MAAM,EAAE,EAC5B,KAAK,CAAC,mBAAO;CAOzB;AAED,qBAAa,wBAAyB,SAAQ,aAAa;IAEvD,QAAQ,CAAC,aAAa,EAAE,MAAM;IAC9B,QAAQ,CAAC,kBAAkB,EAAE,MAAM,EAAE;IACrC,QAAQ,CAAC,KAAK,CAAC;gBAFN,aAAa,EAAE,MAAM,EACrB,kBAAkB,EAAE,MAAM,EAAE,EAC5B,KAAK,CAAC,mBAAO;CAOzB"}

package/out/errors.js
CHANGED
@@ -29,20 +29,30 @@ exports.ForgeSQLAPIError = ForgeSQLAPIError;
 class MigrationExecutionError extends ForgeSQLError {
 migrationName;
 migrationsYetToRun;
-
+cause;
+constructor(migrationName, migrationsYetToRun, cause) {
 super(`Failed to execute migration with name ${migrationName}`);
 this.migrationName = migrationName;
 this.migrationsYetToRun = migrationsYetToRun;
+this.cause = cause;
+this.migrationName = migrationName;
+this.migrationsYetToRun = migrationsYetToRun;
+this.cause = cause;
 }
 }
 exports.MigrationExecutionError = MigrationExecutionError;
 class MigrationCheckPointError extends ForgeSQLError {
 migrationName;
 migrationsYetToRun;
-
+cause;
+constructor(migrationName, migrationsYetToRun, cause) {
 super(`Failed to checkpoint after running migration with name ${migrationName}`);
 this.migrationName = migrationName;
 this.migrationsYetToRun = migrationsYetToRun;
+this.cause = cause;
+this.migrationName = migrationName;
+this.migrationsYetToRun = migrationsYetToRun;
+this.cause = cause;
 }
 }
 exports.MigrationCheckPointError = MigrationCheckPointError;

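The compiled output above is the substance of the error change: both migration error classes now accept the underlying error and expose it as cause, alongside migrationName and migrationsYetToRun. A sketch of what that enables in app code (the logging shown is illustrative, not part of the package):

```ts
import { migrationRunner } from '@forge/sql';

export async function runMigrationsSafely(): Promise<string[]> {
  try {
    return await migrationRunner.run();
  } catch (error: any) {
    // MigrationExecutionError and MigrationCheckPointError report which
    // migration failed, what is still queued, and, new in this release,
    // the original error via `cause`.
    console.error(`Migration ${error.migrationName} failed`, {
      remaining: error.migrationsYetToRun,
      cause: error.cause,
    });
    throw error;
  }
}
```
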
package/out/migration.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"migration.d.ts","sourceRoot":"","sources":["../src/migration.ts"],"names":[],"mappings":"AAAA,OAAO,EAAO,SAAS,EAAE,MAAM,OAAO,CAAC;AAGvC,aAAK,eAAe,GAAG;IACrB,IAAI,EAAE,MAAM,CAAC;IACb,SAAS,EAAE,MAAM,CAAC;CACnB,CAAC;AAEF,aAAK,gBAAgB,GAAG;IACtB,EAAE,EAAE,MAAM,CAAC;IACX,IAAI,EAAE,MAAM,CAAC;IACb,UAAU,EAAE,IAAI,CAAC;CAClB,CAAC;AAcF,qBAAa,eAAe;IAC1B,OAAO,CAAC,QAAQ,CAAC,SAAS,CAAY;IACtC,OAAO,CAAC,QAAQ,CAAC,UAAU,CAAoB;gBAEnC,SAAS,EAAE,SAAS;IAK1B,UAAU;IAIT,OAAO,CAAC,IAAI,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM,GAAG,eAAe;IAQzD,WAAW,IAAI,eAAe,EAAE;IAIjC,IAAI,IAAI,OAAO,CAAC,gBAAgB,EAAE,CAAC;IAYnC,GAAG,IAAI,OAAO,CAAC,MAAM,EAAE,CAAC;
+{"version":3,"file":"migration.d.ts","sourceRoot":"","sources":["../src/migration.ts"],"names":[],"mappings":"AAAA,OAAO,EAAO,SAAS,EAAE,MAAM,OAAO,CAAC;AAGvC,aAAK,eAAe,GAAG;IACrB,IAAI,EAAE,MAAM,CAAC;IACb,SAAS,EAAE,MAAM,CAAC;CACnB,CAAC;AAEF,aAAK,gBAAgB,GAAG;IACtB,EAAE,EAAE,MAAM,CAAC;IACX,IAAI,EAAE,MAAM,CAAC;IACb,UAAU,EAAE,IAAI,CAAC;CAClB,CAAC;AAcF,qBAAa,eAAe;IAC1B,OAAO,CAAC,QAAQ,CAAC,SAAS,CAAY;IACtC,OAAO,CAAC,QAAQ,CAAC,UAAU,CAAoB;gBAEnC,SAAS,EAAE,SAAS;IAK1B,UAAU;IAIT,OAAO,CAAC,IAAI,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM,GAAG,eAAe;IAQzD,WAAW,IAAI,eAAe,EAAE;IAIjC,IAAI,IAAI,OAAO,CAAC,gBAAgB,EAAE,CAAC;IAYnC,GAAG,IAAI,OAAO,CAAC,MAAM,EAAE,CAAC;CAmC/B;AAED,eAAO,MAAM,eAAe,iBAA2B,CAAC"}

package/out/migration.js
CHANGED
@@ -14,7 +14,7 @@ class MigrationRunner {
 this.migrations = [];
 }
 async initialize() {
-await this.sqlClient.
+await this.sqlClient.executeDDL(SCHEMA_VERSION_TABLE_CREATE_QUERY);
 }
 enqueue(name, statement) {
 if (this.migrations.some((migration) => migration.name === name)) {
@@ -27,7 +27,7 @@ class MigrationRunner {
 return this.migrations;
 }
 async list() {
-const result = await this.sqlClient.
+const result = await this.sqlClient.executeDDL(LIST_MIGRATIONS_QUERY);
 const migrations = new Array();
 for (const row of result.rows) {
 const migratedAt = new Date(row.migratedAt);
@@ -44,16 +44,16 @@ class MigrationRunner {
 const migrationsSuccessfullyRun = [];
 for (const migration of migrationsToRun) {
 try {
-await this.sqlClient.
+await this.sqlClient.executeDDL(migration.statement);
 }
 catch (error) {
-throw new errors_1.MigrationExecutionError(migration.name, migrationsToRun.map((m) => m.name));
+throw new errors_1.MigrationExecutionError(migration.name, migrationsToRun.map((m) => m.name), error);
 }
 try {
 await this.sqlClient.prepare(INSERT_SCHEMA_VERSION_QUERY).bindParams(migration.name).execute();
 }
 catch (error) {
-throw new errors_1.MigrationCheckPointError(migration.name, migrationsToRun.map((m) => m.name));
+throw new errors_1.MigrationCheckPointError(migration.name, migrationsToRun.map((m) => m.name), error);
 }
 migrationsSuccessfullyRun.push(migration.name);
 }

package/out/sql.d.ts
CHANGED
@@ -1,11 +1,19 @@
 import { Result } from './utils/types';
-import {
+import { SqlStatement } from './sql-statement';
+export declare const SQL_API_ENDPOINTS: {
+readonly EXECUTE: "/api/v1/execute";
+readonly EXECUTE_DDL: "/api/v1/execute/ddl";
+};
+declare type SqlAPIEndPoints = (typeof SQL_API_ENDPOINTS)[keyof typeof SQL_API_ENDPOINTS];
 export declare class SqlClient {
 private sendRequest;
-storageApi
-
+private storageApi;
+private storageApiWithOptions;
+prepare<DataType>(query: string, endpoint?: SqlAPIEndPoints): SqlStatement<Result<DataType>>;
 executeRaw<DataType>(query: string): Promise<Result<DataType>>;
 _provision(): Promise<void>;
+executeDDL<DataType>(query: string): Promise<Result<DataType>>;
 }
 export declare const sql: SqlClient;
+export {};
 //# sourceMappingURL=sql.d.ts.map

package/out/sql.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"sql.d.ts","sourceRoot":"","sources":["../src/sql.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,MAAM,EAAE,MAAM,eAAe,CAAC;AACvC,OAAO,
+{"version":3,"file":"sql.d.ts","sourceRoot":"","sources":["../src/sql.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,MAAM,EAAE,MAAM,eAAe,CAAC;AACvC,OAAO,EAAgC,YAAY,EAAE,MAAM,iBAAiB,CAAC;AAI7E,eAAO,MAAM,iBAAiB;;;CAGpB,CAAC;AAEX,aAAK,eAAe,GAAG,CAAC,OAAO,iBAAiB,CAAC,CAAC,MAAM,OAAO,iBAAiB,CAAC,CAAC;AAElF,qBAAa,SAAS;YACN,WAAW;YAYX,UAAU;YAsBV,qBAAqB;IAYnC,OAAO,CAAC,QAAQ,EACd,KAAK,EAAE,MAAM,EACb,QAAQ,GAAE,eAA2C,GACpD,YAAY,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC;IAU3B,UAAU,CAAC,QAAQ,EAAE,KAAK,EAAE,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC;IAK9D,UAAU,IAAI,OAAO,CAAC,IAAI,CAAC;IAkB3B,UAAU,CAAC,QAAQ,EAAE,KAAK,EAAE,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC;CAGrE;AAED,eAAO,MAAM,GAAG,WAAkB,CAAC"}

package/out/sql.js
CHANGED
@@ -1,10 +1,14 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.sql = exports.SqlClient = void 0;
+exports.sql = exports.SqlClient = exports.SQL_API_ENDPOINTS = void 0;
 const api_1 = require("@forge/api");
 const sql_statement_1 = require("./sql-statement");
 const error_handling_1 = require("./utils/error-handling");
 const errors_1 = require("./errors");
+exports.SQL_API_ENDPOINTS = {
+EXECUTE: '/api/v1/execute',
+EXECUTE_DDL: '/api/v1/execute/ddl'
+};
 class SqlClient {
 async sendRequest(path, options) {
 const response = await (0, api_1.__fetchProduct)({ provider: 'app', remote: 'sql' })(path, {
@@ -17,8 +21,8 @@ class SqlClient {
 });
 return response;
 }
-async storageApi(query, params = [], method = 'all') {
-const response = await this.sendRequest(
+async storageApi(query, params = [], method = 'all', endpoint = exports.SQL_API_ENDPOINTS.EXECUTE) {
+const response = await this.sendRequest(endpoint, {
 method: 'POST',
 body: JSON.stringify({ query, params, method })
 });
@@ -31,11 +35,16 @@ class SqlClient {
 throw new errors_1.ForgeSQLError(`Unexpected error. Response was not valid JSON: ${responseText}`);
 }
 }
-
-
+async storageApiWithOptions(query, options = {}) {
+const { endpoint = exports.SQL_API_ENDPOINTS.EXECUTE, params = [], method = 'all' } = options;
+return await this.storageApi(query, params, method, endpoint);
+}
+prepare(query, endpoint = exports.SQL_API_ENDPOINTS.EXECUTE) {
+const remoteSqlApi = (query, params, method) => this.storageApiWithOptions(query, { endpoint, params, method });
+return new sql_statement_1.SqlStatement(query, remoteSqlApi);
 }
 async executeRaw(query) {
-return await this.prepare(query).execute();
+return await this.prepare(query, exports.SQL_API_ENDPOINTS.EXECUTE).execute();
 }
 async _provision() {
 const response = await this.sendRequest('/api/v1/provision', {
@@ -43,6 +52,9 @@ class SqlClient {
 });
 await (0, error_handling_1.checkResponseError)(response, 'Unexpected error in provision request');
 }
+async executeDDL(query) {
+return await this.prepare(query, exports.SQL_API_ENDPOINTS.EXECUTE_DDL).execute();
+}
 }
 exports.SqlClient = SqlClient;
 exports.sql = new SqlClient();

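Taken together, the sql.js changes route prepare()/executeRaw() through the /api/v1/execute endpoint and the new executeDDL() through /api/v1/execute/ddl, as encoded in SQL_API_ENDPOINTS. A usage sketch against the exported client; the widgets table and row type are illustrative only:

```ts
import { sql } from '@forge/sql';

type Widget = { id: number; name: string };

export async function example(): Promise<Widget[]> {
  // DDL now has a dedicated method backed by the EXECUTE_DDL endpoint.
  await sql.executeDDL('CREATE TABLE IF NOT EXISTS widgets (id INT, name VARCHAR(255))');

  // Parameterized queries still go through prepare() and the EXECUTE endpoint.
  const byId = await sql
    .prepare<Widget>('SELECT id, name FROM widgets WHERE id = ?')
    .bindParams(1)
    .execute();
  console.log(byId.rows);

  // executeRaw() remains available for unparameterized statements.
  const all = await sql.executeRaw<Widget>('SELECT id, name FROM widgets');
  return all.rows;
}
```
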
package/out/utils/error-handling.d.ts.map
CHANGED

@@ -1 +1 @@
-{"version":3,"file":"error-handling.d.ts","sourceRoot":"","sources":["../../src/utils/error-handling.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,WAAW,EAAE,MAAM,YAAY,CAAC;AAEzC,OAAO,EAA2B,UAAU,EAAoB,MAAM,WAAW,CAAC;AAGlF,wBAAgB,YAAY,CAAC,IAAI,EAAE,OAAO,GAAG,IAAI,IAAI,UAAU,CAO9D;AASD,wBAAsB,kBAAkB,CAAC,QAAQ,EAAE,WAAW,EAAE,OAAO,CAAC,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC,
+{"version":3,"file":"error-handling.d.ts","sourceRoot":"","sources":["../../src/utils/error-handling.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,WAAW,EAAE,MAAM,YAAY,CAAC;AAEzC,OAAO,EAA2B,UAAU,EAAoB,MAAM,WAAW,CAAC;AAGlF,wBAAgB,YAAY,CAAC,IAAI,EAAE,OAAO,GAAG,IAAI,IAAI,UAAU,CAO9D;AASD,wBAAsB,kBAAkB,CAAC,QAAQ,EAAE,WAAW,EAAE,OAAO,CAAC,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC,CAsC/F"}
|