@squadbase/vite-server 0.0.1-build-12 → 0.0.1-build-14
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli/index.js +48 -486
- package/dist/index.d.ts +6 -10
- package/dist/index.js +120 -500
- package/dist/main.js +54 -499
- package/dist/vite-plugin.js +34 -445
- package/package.json +2 -1
package/dist/vite-plugin.js
CHANGED
@@ -14,19 +14,7 @@ import path2 from "path";
 import { readFileSync, watch as fsWatch } from "fs";
 import { readFile } from "fs/promises";
 import path from "path";
-
-// src/connector-client/postgresql.ts
-import pg from "pg";
-var { Pool } = pg;
-function createPostgreSQLClient(connectionString) {
-  const pool = new Pool({ connectionString, ssl: { rejectUnauthorized: false } });
-  return {
-    async query(sql, params) {
-      const result = await pool.query(sql, params);
-      return { rows: result.rows };
-    }
-  };
-}
+import { connectors } from "@squadbase/connectors";
 
 // src/connector-client/env.ts
 function resolveEnvVar(entry, key, connectionId) {
@@ -46,392 +34,8 @@ function resolveEnvVarOptional(entry, key) {
   return process.env[envVarName] || void 0;
 }
 
-// src/connector-client/bigquery.ts
-function createBigQueryClient(entry, connectionId) {
-  const projectId = resolveEnvVar(entry, "project-id", connectionId);
-  const serviceAccountJsonBase64 = resolveEnvVar(entry, "service-account-key-json-base64", connectionId);
-  const serviceAccountJson = Buffer.from(serviceAccountJsonBase64, "base64").toString("utf-8");
-  let gcpCredentials;
-  try {
-    gcpCredentials = JSON.parse(serviceAccountJson);
-  } catch {
-    throw new Error(
-      `BigQuery service account JSON (decoded from base64) is not valid JSON for connectionId "${connectionId}"`
-    );
-  }
-  return {
-    async query(sql) {
-      const { BigQuery } = await import("@google-cloud/bigquery");
-      const bq = new BigQuery({ projectId, credentials: gcpCredentials });
-      const [job] = await bq.createQueryJob({ query: sql });
-      const [allRows] = await job.getQueryResults({ timeoutMs: 3e4 });
-      return { rows: allRows };
-    }
-  };
-}
-
-// src/connection.ts
-import { getContext } from "hono/context-storage";
-import { getCookie } from "hono/cookie";
-var APP_SESSION_COOKIE_NAME = "__Host-squadbase-session";
-var PREVIEW_SESSION_COOKIE_NAME = "squadbase-preview-session";
-var APP_BASE_DOMAIN = "squadbase.app";
-var PREVIEW_BASE_DOMAIN = "preview.app.squadbase.dev";
-var SANDBOX_ID_ENV_NAME = "INTERNAL_SQUADBASE_SANDBOX_ID";
-var MACHINE_CREDENTIAL_ENV_NAME = "INTERNAL_SQUADBASE_OAUTH_MACHINE_CREDENTIAL";
-function resolveProxyUrl(connectionId) {
-  const connectionPath = `/_sqcore/connections/${connectionId}/request`;
-  const sandboxId = process.env[SANDBOX_ID_ENV_NAME];
-  if (sandboxId) {
-    const baseDomain2 = process.env["SQUADBASE_PREVIEW_BASE_DOMAIN"] ?? PREVIEW_BASE_DOMAIN;
-    return `https://${sandboxId}.${baseDomain2}${connectionPath}`;
-  }
-  const projectId = process.env["SQUADBASE_PROJECT_ID"];
-  if (!projectId) {
-    throw new Error(
-      "Project ID is required. Please set SQUADBASE_PROJECT_ID environment variable."
-    );
-  }
-  const baseDomain = process.env["SQUADBASE_APP_BASE_DOMAIN"] ?? APP_BASE_DOMAIN;
-  return `https://${projectId}.${baseDomain}${connectionPath}`;
-}
-function resolveAuthHeaders() {
-  const machineCredential = process.env[MACHINE_CREDENTIAL_ENV_NAME];
-  if (machineCredential) {
-    return { Authorization: `Bearer ${machineCredential}` };
-  }
-  const c = getContext();
-  const cookies = getCookie(c);
-  const previewSession = cookies[PREVIEW_SESSION_COOKIE_NAME];
-  if (previewSession) {
-    return {
-      Cookie: `${PREVIEW_SESSION_COOKIE_NAME}=${previewSession}`
-    };
-  }
-  const appSession = cookies[APP_SESSION_COOKIE_NAME];
-  if (appSession) {
-    return { Authorization: `Bearer ${appSession}` };
-  }
-  throw new Error(
-    "No authentication method available for connection proxy. Expected one of: INTERNAL_SQUADBASE_OAUTH_MACHINE_CREDENTIAL env var, preview session cookie, or app session cookie."
-  );
-}
-function connection(connectionId) {
-  return {
-    async fetch(url, options) {
-      const proxyUrl = resolveProxyUrl(connectionId);
-      const authHeaders = resolveAuthHeaders();
-      return await fetch(proxyUrl, {
-        method: "POST",
-        headers: {
-          "Content-Type": "application/json",
-          ...authHeaders
-        },
-        body: JSON.stringify({
-          url,
-          method: options?.method,
-          headers: options?.headers,
-          body: options?.body,
-          timeoutMs: options?.timeoutMs
-        })
-      });
-    }
-  };
-}
-
-// src/connector-client/bigquery-oauth.ts
-var MAX_RESULTS = 1e4;
-var POLL_INTERVAL_MS = 1e3;
-var POLL_TIMEOUT_MS = 12e4;
-function flattenRows(fields, rows) {
-  return rows.map((row) => {
-    const obj = {};
-    for (let i = 0; i < fields.length; i++) {
-      obj[fields[i].name] = row.f[i].v;
-    }
-    return obj;
-  });
-}
-function createBigQueryOAuthClient(entry, connectionId) {
-  const projectId = resolveEnvVar(entry, "project-id", connectionId);
-  const baseUrl = `https://bigquery.googleapis.com/bigquery/v2/projects/${projectId}`;
-  return {
-    async query(sql) {
-      const conn = connection(connectionId);
-      const res = await conn.fetch(`${baseUrl}/queries`, {
-        method: "POST",
-        body: {
-          query: sql,
-          useLegacySql: false,
-          maxResults: MAX_RESULTS
-        }
-      });
-      if (!res.ok) {
-        const text = await res.text().catch(() => res.statusText);
-        throw new Error(`BigQuery query failed: HTTP ${res.status} ${text}`);
-      }
-      let data = await res.json();
-      if (data.errors?.length) {
-        throw new Error(
-          `BigQuery query error: ${data.errors.map((e) => e.message).join("; ")}`
-        );
-      }
-      if (!data.jobComplete) {
-        const jobId = data.jobReference.jobId;
-        const location = data.jobReference.location;
-        const deadline = Date.now() + POLL_TIMEOUT_MS;
-        while (!data.jobComplete) {
-          if (Date.now() > deadline) {
-            throw new Error(
-              `BigQuery query timed out after ${POLL_TIMEOUT_MS / 1e3}s (jobId: ${jobId})`
-            );
-          }
-          await new Promise((r) => setTimeout(r, POLL_INTERVAL_MS));
-          const params = new URLSearchParams({
-            maxResults: String(MAX_RESULTS)
-          });
-          if (location) params.set("location", location);
-          const pollRes = await conn.fetch(
-            `${baseUrl}/queries/${jobId}?${params}`,
-            { method: "GET" }
-          );
-          if (!pollRes.ok) {
-            const text = await pollRes.text().catch(() => pollRes.statusText);
-            throw new Error(
-              `BigQuery poll failed: HTTP ${pollRes.status} ${text}`
-            );
-          }
-          data = await pollRes.json();
-          if (data.errors?.length) {
-            throw new Error(
-              `BigQuery query error: ${data.errors.map((e) => e.message).join("; ")}`
-            );
-          }
-        }
-      }
-      const fields = data.schema?.fields ?? [];
-      const rawRows = data.rows ?? [];
-      return { rows: flattenRows(fields, rawRows) };
-    }
-  };
-}
-
-// src/connector-client/snowflake.ts
-function createSnowflakeClient(entry, connectionId) {
-  const accountIdentifier = resolveEnvVar(entry, "account", connectionId);
-  const user = resolveEnvVar(entry, "user", connectionId);
-  const role = resolveEnvVar(entry, "role", connectionId);
-  const warehouse = resolveEnvVar(entry, "warehouse", connectionId);
-  const privateKeyBase64 = resolveEnvVar(entry, "private-key-base64", connectionId);
-  const privateKey = Buffer.from(privateKeyBase64, "base64").toString("utf-8");
-  return {
-    async query(sql) {
-      const snowflake = (await import("snowflake-sdk")).default;
-      snowflake.configure({ logLevel: "ERROR" });
-      const connection2 = snowflake.createConnection({
-        account: accountIdentifier,
-        username: user,
-        role,
-        warehouse,
-        authenticator: "SNOWFLAKE_JWT",
-        privateKey
-      });
-      await new Promise((resolve, reject) => {
-        connection2.connect((err) => {
-          if (err) reject(new Error(`Snowflake connect failed: ${err.message}`));
-          else resolve();
-        });
-      });
-      const rows = await new Promise((resolve, reject) => {
-        connection2.execute({
-          sqlText: sql,
-          complete: (err, _stmt, rows2) => {
-            if (err) reject(new Error(`Snowflake query failed: ${err.message}`));
-            else resolve(rows2 ?? []);
-          }
-        });
-      });
-      connection2.destroy((err) => {
-        if (err) console.warn(`[connector-client] Snowflake destroy error: ${err.message}`);
-      });
-      return { rows };
-    }
-  };
-}
-
-// src/connector-client/mysql.ts
-function createMySQLClient(entry, connectionId) {
-  const connectionUrl = resolveEnvVar(entry, "connection-url", connectionId);
-  let poolPromise = null;
-  function getPool() {
-    if (!poolPromise) {
-      poolPromise = import("mysql2/promise").then(
-        (mysql) => mysql.default.createPool(connectionUrl)
-      );
-    }
-    return poolPromise;
-  }
-  return {
-    async query(sql, params) {
-      const pool = await getPool();
-      const [rows] = await pool.execute(sql, params);
-      return { rows };
-    }
-  };
-}
-
-// src/connector-client/aws-athena.ts
-function createAthenaClient(entry, connectionId) {
-  const region = resolveEnvVar(entry, "aws-region", connectionId);
-  const accessKeyId = resolveEnvVar(entry, "aws-access-key-id", connectionId);
-  const secretAccessKey = resolveEnvVar(entry, "aws-secret-access-key", connectionId);
-  const workgroup = resolveEnvVarOptional(entry, "workgroup") ?? "primary";
-  const outputLocation = resolveEnvVarOptional(entry, "output-location");
-  return {
-    async query(sql) {
-      const {
-        AthenaClient,
-        StartQueryExecutionCommand,
-        GetQueryExecutionCommand,
-        GetQueryResultsCommand
-      } = await import("@aws-sdk/client-athena");
-      const client = new AthenaClient({
-        region,
-        credentials: { accessKeyId, secretAccessKey }
-      });
-      const startParams = {
-        QueryString: sql,
-        WorkGroup: workgroup
-      };
-      if (outputLocation) {
-        startParams.ResultConfiguration = { OutputLocation: outputLocation };
-      }
-      const { QueryExecutionId } = await client.send(
-        new StartQueryExecutionCommand(startParams)
-      );
-      if (!QueryExecutionId) throw new Error("Athena: failed to start query execution");
-      while (true) {
-        const { QueryExecution } = await client.send(
-          new GetQueryExecutionCommand({ QueryExecutionId })
-        );
-        const state = QueryExecution?.Status?.State;
-        if (state === "SUCCEEDED") break;
-        if (state === "FAILED") {
-          throw new Error(
-            `Athena query failed: ${QueryExecution?.Status?.StateChangeReason ?? "unknown"}`
-          );
-        }
-        if (state === "CANCELLED") throw new Error("Athena query was cancelled");
-        await new Promise((r) => setTimeout(r, 500));
-      }
-      const { ResultSet } = await client.send(
-        new GetQueryResultsCommand({ QueryExecutionId })
-      );
-      const resultRows = ResultSet?.Rows ?? [];
-      if (resultRows.length === 0) return { rows: [] };
-      const headers = resultRows[0].Data?.map((d) => d.VarCharValue ?? "") ?? [];
-      const rows = resultRows.slice(1).map((row) => {
-        const obj = {};
-        row.Data?.forEach((d, i) => {
-          obj[headers[i]] = d.VarCharValue ?? null;
-        });
-        return obj;
-      });
-      return { rows };
-    }
-  };
-}
-
-// src/connector-client/redshift.ts
-function createRedshiftClient(entry, connectionId) {
-  const region = resolveEnvVar(entry, "aws-region", connectionId);
-  const accessKeyId = resolveEnvVar(entry, "aws-access-key-id", connectionId);
-  const secretAccessKey = resolveEnvVar(entry, "aws-secret-access-key", connectionId);
-  const database = resolveEnvVar(entry, "database", connectionId);
-  const clusterIdentifier = resolveEnvVarOptional(entry, "cluster-identifier");
-  const workgroupName = resolveEnvVarOptional(entry, "workgroup-name");
-  const secretArn = resolveEnvVarOptional(entry, "secret-arn");
-  const dbUser = resolveEnvVarOptional(entry, "db-user");
-  return {
-    async query(sql) {
-      const {
-        RedshiftDataClient,
-        ExecuteStatementCommand,
-        DescribeStatementCommand,
-        GetStatementResultCommand
-      } = await import("@aws-sdk/client-redshift-data");
-      const client = new RedshiftDataClient({
-        region,
-        credentials: { accessKeyId, secretAccessKey }
-      });
-      const executeParams = {
-        Sql: sql,
-        Database: database
-      };
-      if (clusterIdentifier) executeParams.ClusterIdentifier = clusterIdentifier;
-      if (workgroupName) executeParams.WorkgroupName = workgroupName;
-      if (secretArn) executeParams.SecretArn = secretArn;
-      if (dbUser) executeParams.DbUser = dbUser;
-      const { Id } = await client.send(
-        new ExecuteStatementCommand(executeParams)
-      );
-      if (!Id) throw new Error("Redshift: failed to start statement execution");
-      while (true) {
-        const desc = await client.send(new DescribeStatementCommand({ Id }));
-        const status = desc.Status;
-        if (status === "FINISHED") break;
-        if (status === "FAILED") {
-          throw new Error(`Redshift query failed: ${desc.Error ?? "unknown"}`);
-        }
-        if (status === "ABORTED") throw new Error("Redshift query was aborted");
-        await new Promise((r) => setTimeout(r, 500));
-      }
-      const result = await client.send(new GetStatementResultCommand({ Id }));
-      const columns = result.ColumnMetadata?.map((c) => c.name ?? "") ?? [];
-      const rows = (result.Records ?? []).map((record) => {
-        const obj = {};
-        record.forEach((field, i) => {
-          const col = columns[i];
-          const value = field.stringValue ?? field.longValue ?? field.doubleValue ?? field.booleanValue ?? (field.isNull ? null : field.blobValue ?? null);
-          obj[col] = value;
-        });
-        return obj;
-      });
-      return { rows };
-    }
-  };
-}
-
-// src/connector-client/databricks.ts
-function createDatabricksClient(entry, connectionId) {
-  const host = resolveEnvVar(entry, "host", connectionId);
-  const httpPath = resolveEnvVar(entry, "http-path", connectionId);
-  const token = resolveEnvVar(entry, "token", connectionId);
-  return {
-    async query(sql) {
-      const { DBSQLClient } = await import("@databricks/sql");
-      const client = new DBSQLClient();
-      await client.connect({ host, path: httpPath, token });
-      try {
-        const session = await client.openSession();
-        try {
-          const operation = await session.executeStatement(sql);
-          const result = await operation.fetchAll();
-          await operation.close();
-          return { rows: result };
-        } finally {
-          await session.close();
-        }
-      } finally {
-        await client.close();
-      }
-    }
-  };
-}
-
 // src/connector-client/registry.ts
 function createConnectorRegistry() {
-  const clientCache = /* @__PURE__ */ new Map();
   function getConnectionsFilePath() {
     return process.env.CONNECTIONS_PATH ?? path.join(process.cwd(), ".squadbase/connections.json");
   }
@@ -444,56 +48,28 @@ function createConnectorRegistry() {
       return {};
     }
   }
-  async function
+  async function getQuery2(connectionId) {
     const connections = await loadConnections2();
     const entry = connections[connectionId];
     if (!entry) {
-      throw new Error(
-
-
-    const cached = clientCache.get(connectionId);
-    if (cached) return { client: cached, connectorSlug };
-    if (connectorSlug === "snowflake") {
-      return { client: createSnowflakeClient(entry, connectionId), connectorSlug };
-    }
-    if (connectorSlug === "bigquery") {
-      if (entry.connector.authType === "oauth") {
-        return { client: createBigQueryOAuthClient(entry, connectionId), connectorSlug };
-      }
-      return { client: createBigQueryClient(entry, connectionId), connectorSlug };
-    }
-    if (connectorSlug === "athena") {
-      return { client: createAthenaClient(entry, connectionId), connectorSlug };
-    }
-    if (connectorSlug === "redshift") {
-      return { client: createRedshiftClient(entry, connectionId), connectorSlug };
-    }
-    if (connectorSlug === "databricks") {
-      return { client: createDatabricksClient(entry, connectionId), connectorSlug };
+      throw new Error(
+        `connection '${connectionId}' not found in .squadbase/connections.json`
+      );
     }
-
-
-
-
+    const { slug, authType } = entry.connector;
+    const plugin = connectors.findByKey(slug, authType);
+    if (!plugin) {
+      throw new Error(
+        `connector "${slug}" (authType: ${authType ?? "none"}) is not registered in @squadbase/connectors`
+      );
     }
-    if (
-
-
-
-    }
-    const connectionUrl = process.env[urlEnvName];
-    if (!connectionUrl) {
-      throw new Error(
-        `environment variable '${urlEnvName}' (mapped from connection '${connectionId}') is not set`
-      );
-    }
-    const client = createPostgreSQLClient(connectionUrl);
-    clientCache.set(connectionId, client);
-    return { client, connectorSlug };
+    if (!plugin.query) {
+      throw new Error(
+        `connector "${plugin.connectorKey}" does not support SQL queries. Non-SQL connectors (airtable, google-analytics, kintone, wix-store, dbt) should be used via TypeScript handlers.`
+      );
     }
-
-
-    );
+    const params = resolveParams(entry, connectionId, plugin);
+    return (sql, namedParams) => plugin.query(params, sql, namedParams);
   }
   function reloadEnvFile2(envPath) {
     try {
@@ -516,18 +92,31 @@ function createConnectorRegistry() {
     const envPath = path.join(process.cwd(), ".env");
     try {
       fsWatch(filePath, { persistent: false }, () => {
-        console.log(
-
+        console.log(
+          "[connector-client] connections.json changed"
+        );
         setImmediate(() => reloadEnvFile2(envPath));
       });
     } catch {
     }
   }
-  return {
+  return { getQuery: getQuery2, loadConnections: loadConnections2, reloadEnvFile: reloadEnvFile2, watchConnectionsFile: watchConnectionsFile2 };
+}
+function resolveParams(entry, connectionId, plugin) {
+  const params = {};
+  for (const param of Object.values(plugin.parameters)) {
+    if (param.required) {
+      params[param.slug] = resolveEnvVar(entry, param.slug, connectionId);
+    } else {
+      const val = resolveEnvVarOptional(entry, param.slug);
+      if (val !== void 0) params[param.slug] = val;
+    }
+  }
+  return params;
 }
 
 // src/connector-client/index.ts
-var {
+var { getQuery, loadConnections, reloadEnvFile, watchConnectionsFile } = createConnectorRegistry();
 
 // src/types/data-source.ts
 import { z } from "zod";
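
The registry rewrite above drops the hand-written PostgreSQL, BigQuery, Snowflake, MySQL, Athena, Redshift, and Databricks clients and instead resolves a connector plugin through connectors.findByKey(slug, authType). A minimal TypeScript sketch of the plugin shape that getQuery2() and resolveParams() appear to rely on; the interface and field names below are inferred from this diff for illustration only and are not the published typings of @squadbase/connectors:

// Hypothetical plugin shape, inferred from getQuery2() / resolveParams() in the diff above.
interface ConnectorParameter {
  slug: string;      // env-mapped parameter key, e.g. "project-id" or "connection-url"
  required: boolean; // required parameters throw if the mapped env var is missing
}

interface ConnectorPlugin {
  connectorKey: string;                           // referenced in the "does not support SQL queries" error
  parameters: Record<string, ConnectorParameter>; // iterated by resolveParams()
  // Only SQL-capable connectors expose query(); getQuery2() rejects the rest.
  query?: (
    params: Record<string, string>,
    sql: string,
    namedParams?: Record<string, unknown>
  ) => Promise<{ rows: unknown[] }>;
}

Per the added code, getQuery2() validates the connection entry, maps each declared parameter to its environment variable, and returns the bound closure (sql, namedParams) => plugin.query(params, sql, namedParams).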
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@squadbase/vite-server",
-  "version": "0.0.1-build-
+  "version": "0.0.1-build-14",
   "type": "module",
   "exports": {
     ".": {
@@ -35,6 +35,7 @@
     "build:cli": "tsup src/cli/index.ts --out-dir dist/cli --format esm --platform node --no-splitting --external pg --external snowflake-sdk --external @google-cloud/bigquery --external mysql2 --external @aws-sdk/client-athena --external @aws-sdk/client-redshift-data --external @databricks/sql --external @google-analytics/data --external @kintone/rest-api-client --external hono --external @clack/prompts"
   },
   "dependencies": {
+    "@squadbase/connectors": "^0.0.2",
     "@aws-sdk/client-athena": "^3.750.0",
     "@aws-sdk/client-redshift-data": "^3.750.0",
     "@databricks/sql": "^1.8.0",