@squadbase/vite-server 0.0.1-build-3 → 0.0.1-build-5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.ts +1 -120
- package/dist/index.js +60 -530
- package/dist/main.js +59 -260
- package/dist/types/data-source.d.ts +14 -9
- package/dist/vite-plugin.js +34 -251
- package/package.json +2 -9
- package/dist/cli/index.d.ts +0 -1
- package/dist/cli/index.js +0 -848
package/dist/vite-plugin.js
CHANGED
@@ -2,8 +2,6 @@
 import buildPlugin from "@hono/vite-build/node";
 import devServer from "@hono/vite-dev-server";
 import nodeAdapter from "@hono/vite-dev-server/node";
-import { fileURLToPath } from "url";
-import path3 from "path";
 
 // src/registry.ts
 import { readdir, readFile, mkdir } from "fs/promises";
@@ -28,34 +26,29 @@ function createPostgreSQLClient(connectionString) {
 }
 
 // src/connector-client/env.ts
-function resolveEnvVar(entry, key, slug) {
+function resolveEnvVar(entry, key, connectionId) {
   const envVarName = entry.envVars[key];
   if (!envVarName) {
-    throw new Error(`
+    throw new Error(`Connection "${connectionId}" is missing envVars mapping for key "${key}"`);
   }
   const value = process.env[envVarName];
   if (!value) {
-    throw new Error(`Environment variable "${envVarName}" (for "${
+    throw new Error(`Environment variable "${envVarName}" (for connection "${connectionId}", key "${key}") is not set`);
   }
   return value;
 }
-function resolveEnvVarOptional(entry, key) {
-  const envVarName = entry.envVars[key];
-  if (!envVarName) return void 0;
-  return process.env[envVarName] || void 0;
-}
 
 // src/connector-client/bigquery.ts
-function createBigQueryClient(entry, slug) {
-  const projectId = resolveEnvVar(entry, "project-id", slug);
-  const serviceAccountJsonBase64 = resolveEnvVar(entry, "service-account-json-base64", slug);
+function createBigQueryClient(entry, connectionId) {
+  const projectId = resolveEnvVar(entry, "project-id", connectionId);
+  const serviceAccountJsonBase64 = resolveEnvVar(entry, "service-account-json-base64", connectionId);
   const serviceAccountJson = Buffer.from(serviceAccountJsonBase64, "base64").toString("utf-8");
   let gcpCredentials;
   try {
     gcpCredentials = JSON.parse(serviceAccountJson);
   } catch {
     throw new Error(
-      `BigQuery service account JSON (decoded from base64) is not valid JSON for
+      `BigQuery service account JSON (decoded from base64) is not valid JSON for connection "${connectionId}"`
     );
   }
   return {
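
Note: the hunk above re-keys env-var resolution from connector slug to connection ID, and the error messages now carry both the connection ID and the logical key. A minimal usage sketch, assuming the entry shape implied elsewhere in this diff (the connection ID "warehouse" and the variable name WAREHOUSE_DB_URL are hypothetical):

// Hypothetical connection entry; envVars maps logical keys to env-var names.
const entry = {
  connector: { slug: "postgresql" },
  envVars: { "connection-url": "WAREHOUSE_DB_URL" }
};
process.env.WAREHOUSE_DB_URL = "postgres://user:pass@localhost:5432/app";
// resolveEnvVar(entry, "connection-url", "warehouse") -> the URL above.
// With the mapping missing, the new code throws:
//   Connection "warehouse" is missing envVars mapping for key "connection-url"
// With the mapping present but the variable unset, it throws:
//   Environment variable "WAREHOUSE_DB_URL" (for connection "warehouse", key "connection-url") is not set
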
@@ -70,12 +63,12 @@ function createBigQueryClient(entry, slug) {
 }
 
 // src/connector-client/snowflake.ts
-function createSnowflakeClient(entry, slug) {
-  const accountIdentifier = resolveEnvVar(entry, "account", slug);
-  const user = resolveEnvVar(entry, "user", slug);
-  const role = resolveEnvVar(entry, "role", slug);
-  const warehouse = resolveEnvVar(entry, "warehouse", slug);
-  const privateKeyBase64 = resolveEnvVar(entry, "private-key-base64", slug);
+function createSnowflakeClient(entry, connectionId) {
+  const accountIdentifier = resolveEnvVar(entry, "account", connectionId);
+  const user = resolveEnvVar(entry, "user", connectionId);
+  const role = resolveEnvVar(entry, "role", connectionId);
+  const warehouse = resolveEnvVar(entry, "warehouse", connectionId);
+  const privateKeyBase64 = resolveEnvVar(entry, "private-key-base64", connectionId);
   const privateKey = Buffer.from(privateKeyBase64, "base64").toString("utf-8");
   return {
     async query(sql) {
@@ -112,176 +105,6 @@ function createSnowflakeClient(entry, slug) {
   };
 }
 
-// src/connector-client/mysql.ts
-function createMySQLClient(entry, slug) {
-  const connectionUrl = resolveEnvVar(entry, "connection-url", slug);
-  let poolPromise = null;
-  function getPool() {
-    if (!poolPromise) {
-      poolPromise = import("mysql2/promise").then(
-        (mysql) => mysql.default.createPool(connectionUrl)
-      );
-    }
-    return poolPromise;
-  }
-  return {
-    async query(sql, params) {
-      const pool = await getPool();
-      const [rows] = await pool.execute(sql, params);
-      return { rows };
-    }
-  };
-}
-
-// src/connector-client/aws-athena.ts
-function createAthenaClient(entry, slug) {
-  const region = resolveEnvVar(entry, "aws-region", slug);
-  const accessKeyId = resolveEnvVar(entry, "aws-access-key-id", slug);
-  const secretAccessKey = resolveEnvVar(entry, "aws-secret-access-key", slug);
-  const workgroup = resolveEnvVarOptional(entry, "workgroup") ?? "primary";
-  const outputLocation = resolveEnvVarOptional(entry, "output-location");
-  return {
-    async query(sql) {
-      const {
-        AthenaClient,
-        StartQueryExecutionCommand,
-        GetQueryExecutionCommand,
-        GetQueryResultsCommand
-      } = await import("@aws-sdk/client-athena");
-      const client = new AthenaClient({
-        region,
-        credentials: { accessKeyId, secretAccessKey }
-      });
-      const startParams = {
-        QueryString: sql,
-        WorkGroup: workgroup
-      };
-      if (outputLocation) {
-        startParams.ResultConfiguration = { OutputLocation: outputLocation };
-      }
-      const { QueryExecutionId } = await client.send(
-        new StartQueryExecutionCommand(startParams)
-      );
-      if (!QueryExecutionId) throw new Error("Athena: failed to start query execution");
-      while (true) {
-        const { QueryExecution } = await client.send(
-          new GetQueryExecutionCommand({ QueryExecutionId })
-        );
-        const state = QueryExecution?.Status?.State;
-        if (state === "SUCCEEDED") break;
-        if (state === "FAILED") {
-          throw new Error(
-            `Athena query failed: ${QueryExecution?.Status?.StateChangeReason ?? "unknown"}`
-          );
-        }
-        if (state === "CANCELLED") throw new Error("Athena query was cancelled");
-        await new Promise((r) => setTimeout(r, 500));
-      }
-      const { ResultSet } = await client.send(
-        new GetQueryResultsCommand({ QueryExecutionId })
-      );
-      const resultRows = ResultSet?.Rows ?? [];
-      if (resultRows.length === 0) return { rows: [] };
-      const headers = resultRows[0].Data?.map((d) => d.VarCharValue ?? "") ?? [];
-      const rows = resultRows.slice(1).map((row) => {
-        const obj = {};
-        row.Data?.forEach((d, i) => {
-          obj[headers[i]] = d.VarCharValue ?? null;
-        });
-        return obj;
-      });
-      return { rows };
-    }
-  };
-}
-
-// src/connector-client/redshift.ts
-function createRedshiftClient(entry, slug) {
-  const region = resolveEnvVar(entry, "aws-region", slug);
-  const accessKeyId = resolveEnvVar(entry, "aws-access-key-id", slug);
-  const secretAccessKey = resolveEnvVar(entry, "aws-secret-access-key", slug);
-  const database = resolveEnvVar(entry, "database", slug);
-  const clusterIdentifier = resolveEnvVarOptional(entry, "cluster-identifier");
-  const workgroupName = resolveEnvVarOptional(entry, "workgroup-name");
-  const secretArn = resolveEnvVarOptional(entry, "secret-arn");
-  const dbUser = resolveEnvVarOptional(entry, "db-user");
-  return {
-    async query(sql) {
-      const {
-        RedshiftDataClient,
-        ExecuteStatementCommand,
-        DescribeStatementCommand,
-        GetStatementResultCommand
-      } = await import("@aws-sdk/client-redshift-data");
-      const client = new RedshiftDataClient({
-        region,
-        credentials: { accessKeyId, secretAccessKey }
-      });
-      const executeParams = {
-        Sql: sql,
-        Database: database
-      };
-      if (clusterIdentifier) executeParams.ClusterIdentifier = clusterIdentifier;
-      if (workgroupName) executeParams.WorkgroupName = workgroupName;
-      if (secretArn) executeParams.SecretArn = secretArn;
-      if (dbUser) executeParams.DbUser = dbUser;
-      const { Id } = await client.send(
-        new ExecuteStatementCommand(executeParams)
-      );
-      if (!Id) throw new Error("Redshift: failed to start statement execution");
-      while (true) {
-        const desc = await client.send(new DescribeStatementCommand({ Id }));
-        const status = desc.Status;
-        if (status === "FINISHED") break;
-        if (status === "FAILED") {
-          throw new Error(`Redshift query failed: ${desc.Error ?? "unknown"}`);
-        }
-        if (status === "ABORTED") throw new Error("Redshift query was aborted");
-        await new Promise((r) => setTimeout(r, 500));
-      }
-      const result = await client.send(new GetStatementResultCommand({ Id }));
-      const columns = result.ColumnMetadata?.map((c) => c.name ?? "") ?? [];
-      const rows = (result.Records ?? []).map((record) => {
-        const obj = {};
-        record.forEach((field, i) => {
-          const col = columns[i];
-          const value = field.stringValue ?? field.longValue ?? field.doubleValue ?? field.booleanValue ?? (field.isNull ? null : field.blobValue ?? null);
-          obj[col] = value;
-        });
-        return obj;
-      });
-      return { rows };
-    }
-  };
-}
-
-// src/connector-client/databricks.ts
-function createDatabricksClient(entry, slug) {
-  const host = resolveEnvVar(entry, "host", slug);
-  const httpPath = resolveEnvVar(entry, "http-path", slug);
-  const token = resolveEnvVar(entry, "token", slug);
-  return {
-    async query(sql) {
-      const { DBSQLClient } = await import("@databricks/sql");
-      const client = new DBSQLClient();
-      await client.connect({ host, path: httpPath, token });
-      try {
-        const session = await client.openSession();
-        try {
-          const operation = await session.executeStatement(sql);
-          const result = await operation.fetchAll();
-          await operation.close();
-          return { rows: result };
-        } finally {
-          await session.close();
-        }
-      } finally {
-        await client.close();
-      }
-    }
-  };
-}
-
 // src/connector-client/registry.ts
 function createConnectorRegistry() {
   let connectionsCache = null;
@@ -289,7 +112,7 @@ function createConnectorRegistry() {
   function getConnectionsFilePath() {
     return process.env.CONNECTIONS_PATH ?? path.join(process.cwd(), "../../.squadbase/connections.json");
   }
-  function loadConnections2() {
+  function loadConnections() {
     if (connectionsCache !== null) return connectionsCache;
     const filePath = getConnectionsFilePath();
     try {
@@ -300,67 +123,38 @@ function createConnectorRegistry() {
     }
     return connectionsCache;
   }
-  async function getClient2(
-
-
-      const cached2 = clientCache.get(cacheKey);
-      if (cached2) return cached2;
-      const url = process.env.SQUADBASE_POSTGRESQL_URL;
-      if (!url) throw new Error("SQUADBASE_POSTGRESQL_URL environment variable is not set");
-      const client = createPostgreSQLClient(url);
-      clientCache.set(cacheKey, client);
-      return client;
-    }
-    const cached = clientCache.get(connectorSlug);
-    if (cached) return cached;
-    const connections = loadConnections2();
-    const entry = connections[connectorSlug];
+  async function getClient2(connectionId) {
+    const connections = loadConnections();
+    const entry = connections[connectionId];
     if (!entry) {
-      throw new Error(`
-    }
-    const resolvedType = connectorType ?? entry.connectorType;
-    if (!resolvedType) {
-      throw new Error(
-        `connector type could not be determined for slug '${connectorSlug}'. Specify connectorType in the data-source JSON or in .squadbase/connections.json.`
-      );
-    }
-    if (resolvedType === "snowflake") {
-      return createSnowflakeClient(entry, connectorSlug);
-    }
-    if (resolvedType === "bigquery") {
-      return createBigQueryClient(entry, connectorSlug);
+      throw new Error(`connection '${connectionId}' not found in .squadbase/connections.json`);
     }
-
-
+    const connectorSlug = entry.connector.slug;
+    const cached = clientCache.get(connectionId);
+    if (cached) return { client: cached, connectorSlug };
+    if (connectorSlug === "snowflake") {
+      return { client: createSnowflakeClient(entry, connectionId), connectorSlug };
     }
-    if (
-      return
+    if (connectorSlug === "bigquery") {
+      return { client: createBigQueryClient(entry, connectionId), connectorSlug };
     }
-    if (
-      return createDatabricksClient(entry, connectorSlug);
-    }
-    if (resolvedType === "mysql") {
-      const client = createMySQLClient(entry, connectorSlug);
-      clientCache.set(connectorSlug, client);
-      return client;
-    }
-    if (resolvedType === "postgresql" || resolvedType === "squadbase-db") {
+    if (connectorSlug === "postgresql" || connectorSlug === "squadbase-db") {
       const urlEnvName = entry.envVars["connection-url"];
       if (!urlEnvName) {
-        throw new Error(`'connection-url' is not defined in envVars for
+        throw new Error(`'connection-url' is not defined in envVars for connection '${connectionId}'`);
       }
      const connectionUrl = process.env[urlEnvName];
      if (!connectionUrl) {
        throw new Error(
-          `environment variable '${urlEnvName}' (mapped from
+          `environment variable '${urlEnvName}' (mapped from connection '${connectionId}') is not set`
        );
      }
      const client = createPostgreSQLClient(connectionUrl);
-      clientCache.set(connectorSlug, client);
-      return client;
+      clientCache.set(connectionId, client);
+      return { client, connectorSlug };
    }
    throw new Error(
-      `connector type '${
+      `connector type '${connectorSlug}' is not supported. Supported: "snowflake", "bigquery", "postgresql", "squadbase-db"`
    );
  }
  function reloadEnvFile2(envPath) {
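
Taken together, getClient2 now resolves a connection by ID, reads the connector from entry.connector.slug, and returns { client, connectorSlug } rather than a bare client. A sketch of a .squadbase/connections.json consistent with this lookup (connection IDs and env-var names here are illustrative, not taken from the package):

// Hypothetical .squadbase/connections.json:
// {
//   "analytics-db": {
//     "connector": { "slug": "postgresql" },
//     "envVars": { "connection-url": "ANALYTICS_DB_URL" }
//   },
//   "events": {
//     "connector": { "slug": "bigquery" },
//     "envVars": {
//       "project-id": "BQ_PROJECT_ID",
//       "service-account-json-base64": "BQ_SA_JSON_B64"
//     }
//   }
// }
// Callers destructure both pieces via the registry's getClient export:
const { client, connectorSlug } = await getClient("analytics-db");
const { rows } = await client.query("SELECT 1");
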
@@ -392,11 +186,11 @@ function createConnectorRegistry() {
     } catch {
     }
   }
-  return { getClient: getClient2,
+  return { getClient: getClient2, reloadEnvFile: reloadEnvFile2, watchConnectionsFile: watchConnectionsFile2 };
 }
 
 // src/connector-client/index.ts
-var { getClient,
+var { getClient, reloadEnvFile, watchConnectionsFile } = createConnectorRegistry();
 
 // src/registry.ts
 var viteServer = null;
@@ -417,17 +211,6 @@ var DEFAULT_EXCLUDE = [
   /^\/node_modules\/.*/,
   /^(?!\/api)/
 ];
-function resolveEntry(entry) {
-  if (entry.startsWith(".") || entry.startsWith("/")) return entry;
-  try {
-    const resolvedUrl = import.meta.resolve(entry);
-    const absolutePath = fileURLToPath(resolvedUrl);
-    const relativePath = path3.relative(process.cwd(), absolutePath).replace(/\\/g, "/");
-    return "./" + relativePath;
-  } catch {
-    return entry;
-  }
-}
 function squadbasePlugin(options = {}) {
   const {
     buildEntry = "@squadbase/vite-server/main",
@@ -438,7 +221,7 @@ function squadbasePlugin(options = {}) {
   } = options;
   const isServerBuild = (_, { command, mode }) => command === "build" && mode !== "client";
   const rawBuildPlugin = buildPlugin({
-    entry: resolveEntry(buildEntry),
+    entry: buildEntry,
     outputDir: "./dist/server",
     output: "index.js",
     port,
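
One behavioral consequence of dropping resolveEntry (removed above): the buildEntry option now reaches @hono/vite-build verbatim instead of being rewritten to a relative path via import.meta.resolve. A hedged vite.config sketch; the option name comes from this diff, while the plugin's import path and export style are assumptions:

// vite.config.js (illustrative)
import { defineConfig } from "vite";
import squadbasePlugin from "@squadbase/vite-server"; // default-export form assumed

export default defineConfig({
  plugins: [
    squadbasePlugin({
      // Defaults to "@squadbase/vite-server/main"; with resolveEntry gone,
      // whatever is passed here is handed to buildPlugin unchanged.
      buildEntry: "./src/server/main.ts"
    })
  ]
});
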
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@squadbase/vite-server",
-  "version": "0.0.1-build-3",
+  "version": "0.0.1-build-5",
   "type": "module",
   "exports": {
     ".": {
@@ -30,21 +30,14 @@
   "scripts": {
     "cli": "tsx src/cli/index.ts",
     "build": "tsup && pnpm run build:cli",
-    "build:cli": "tsup src/cli/index.ts --out-dir dist/cli --format esm --platform node --no-splitting --external pg --external snowflake-sdk --external @google-cloud/bigquery --external
+    "build:cli": "tsup src/cli/index.ts --out-dir dist/cli --format esm --platform node --no-splitting --external pg --external snowflake-sdk --external @google-cloud/bigquery --external hono --external @clack/prompts"
   },
   "dependencies": {
-    "@aws-sdk/client-athena": "^3.750.0",
-    "@aws-sdk/client-redshift-data": "^3.750.0",
-    "@databricks/sql": "^1.8.0",
-
-    "@google-analytics/data": "^4.8.0",
     "@google-cloud/bigquery": "^7.9.4",
     "@hono/node-server": "^1.19.9",
     "@hono/vite-build": "^1.10.0",
     "@hono/vite-dev-server": "^0.18.0",
-    "@kintone/rest-api-client": "^5.5.0",
     "hono": "^4.11.9",
-    "mysql2": "^3.11.0",
     "pg": "^8.18.0",
     "snowflake-sdk": "^1.15.0"
   },
package/dist/cli/index.d.ts
DELETED
@@ -1 +0,0 @@
-#!/usr/bin/env node