@squadbase/vite-server 0.0.1-build-1 → 0.0.1-build-3

This diff compares publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
package/dist/main.js CHANGED
@@ -37,6 +37,11 @@ function resolveEnvVar(entry, key, slug) {
   }
   return value;
 }
+function resolveEnvVarOptional(entry, key) {
+  const envVarName = entry.envVars[key];
+  if (!envVarName) return void 0;
+  return process.env[envVarName] || void 0;
+}
 
 // src/connector-client/bigquery.ts
 function createBigQueryClient(entry, slug) {
@@ -105,6 +110,176 @@ function createSnowflakeClient(entry, slug) {
   };
 }
 
+// src/connector-client/mysql.ts
+function createMySQLClient(entry, slug) {
+  const connectionUrl = resolveEnvVar(entry, "connection-url", slug);
+  let poolPromise = null;
+  function getPool() {
+    if (!poolPromise) {
+      poolPromise = import("mysql2/promise").then(
+        (mysql) => mysql.default.createPool(connectionUrl)
+      );
+    }
+    return poolPromise;
+  }
+  return {
+    async query(sql, params) {
+      const pool = await getPool();
+      const [rows] = await pool.execute(sql, params);
+      return { rows };
+    }
+  };
+}
+
+// src/connector-client/aws-athena.ts
+function createAthenaClient(entry, slug) {
+  const region = resolveEnvVar(entry, "aws-region", slug);
+  const accessKeyId = resolveEnvVar(entry, "aws-access-key-id", slug);
+  const secretAccessKey = resolveEnvVar(entry, "aws-secret-access-key", slug);
+  const workgroup = resolveEnvVarOptional(entry, "workgroup") ?? "primary";
+  const outputLocation = resolveEnvVarOptional(entry, "output-location");
+  return {
+    async query(sql) {
+      const {
+        AthenaClient,
+        StartQueryExecutionCommand,
+        GetQueryExecutionCommand,
+        GetQueryResultsCommand
+      } = await import("@aws-sdk/client-athena");
+      const client = new AthenaClient({
+        region,
+        credentials: { accessKeyId, secretAccessKey }
+      });
+      const startParams = {
+        QueryString: sql,
+        WorkGroup: workgroup
+      };
+      if (outputLocation) {
+        startParams.ResultConfiguration = { OutputLocation: outputLocation };
+      }
+      const { QueryExecutionId } = await client.send(
+        new StartQueryExecutionCommand(startParams)
+      );
+      if (!QueryExecutionId) throw new Error("Athena: failed to start query execution");
+      while (true) {
+        const { QueryExecution } = await client.send(
+          new GetQueryExecutionCommand({ QueryExecutionId })
+        );
+        const state = QueryExecution?.Status?.State;
+        if (state === "SUCCEEDED") break;
+        if (state === "FAILED") {
+          throw new Error(
+            `Athena query failed: ${QueryExecution?.Status?.StateChangeReason ?? "unknown"}`
+          );
+        }
+        if (state === "CANCELLED") throw new Error("Athena query was cancelled");
+        await new Promise((r) => setTimeout(r, 500));
+      }
+      const { ResultSet } = await client.send(
+        new GetQueryResultsCommand({ QueryExecutionId })
+      );
+      const resultRows = ResultSet?.Rows ?? [];
+      if (resultRows.length === 0) return { rows: [] };
+      const headers = resultRows[0].Data?.map((d) => d.VarCharValue ?? "") ?? [];
+      const rows = resultRows.slice(1).map((row) => {
+        const obj = {};
+        row.Data?.forEach((d, i) => {
+          obj[headers[i]] = d.VarCharValue ?? null;
+        });
+        return obj;
+      });
+      return { rows };
+    }
+  };
+}
+
+// src/connector-client/redshift.ts
+function createRedshiftClient(entry, slug) {
+  const region = resolveEnvVar(entry, "aws-region", slug);
+  const accessKeyId = resolveEnvVar(entry, "aws-access-key-id", slug);
+  const secretAccessKey = resolveEnvVar(entry, "aws-secret-access-key", slug);
+  const database = resolveEnvVar(entry, "database", slug);
+  const clusterIdentifier = resolveEnvVarOptional(entry, "cluster-identifier");
+  const workgroupName = resolveEnvVarOptional(entry, "workgroup-name");
+  const secretArn = resolveEnvVarOptional(entry, "secret-arn");
+  const dbUser = resolveEnvVarOptional(entry, "db-user");
+  return {
+    async query(sql) {
+      const {
+        RedshiftDataClient,
+        ExecuteStatementCommand,
+        DescribeStatementCommand,
+        GetStatementResultCommand
+      } = await import("@aws-sdk/client-redshift-data");
+      const client = new RedshiftDataClient({
+        region,
+        credentials: { accessKeyId, secretAccessKey }
+      });
+      const executeParams = {
+        Sql: sql,
+        Database: database
+      };
+      if (clusterIdentifier) executeParams.ClusterIdentifier = clusterIdentifier;
+      if (workgroupName) executeParams.WorkgroupName = workgroupName;
+      if (secretArn) executeParams.SecretArn = secretArn;
+      if (dbUser) executeParams.DbUser = dbUser;
+      const { Id } = await client.send(
+        new ExecuteStatementCommand(executeParams)
+      );
+      if (!Id) throw new Error("Redshift: failed to start statement execution");
+      while (true) {
+        const desc = await client.send(new DescribeStatementCommand({ Id }));
+        const status = desc.Status;
+        if (status === "FINISHED") break;
+        if (status === "FAILED") {
+          throw new Error(`Redshift query failed: ${desc.Error ?? "unknown"}`);
+        }
+        if (status === "ABORTED") throw new Error("Redshift query was aborted");
+        await new Promise((r) => setTimeout(r, 500));
+      }
+      const result = await client.send(new GetStatementResultCommand({ Id }));
+      const columns = result.ColumnMetadata?.map((c) => c.name ?? "") ?? [];
+      const rows = (result.Records ?? []).map((record) => {
+        const obj = {};
+        record.forEach((field, i) => {
+          const col = columns[i];
+          const value = field.stringValue ?? field.longValue ?? field.doubleValue ?? field.booleanValue ?? (field.isNull ? null : field.blobValue ?? null);
+          obj[col] = value;
+        });
+        return obj;
+      });
+      return { rows };
+    }
+  };
+}
+
+// src/connector-client/databricks.ts
+function createDatabricksClient(entry, slug) {
+  const host = resolveEnvVar(entry, "host", slug);
+  const httpPath = resolveEnvVar(entry, "http-path", slug);
+  const token = resolveEnvVar(entry, "token", slug);
+  return {
+    async query(sql) {
+      const { DBSQLClient } = await import("@databricks/sql");
+      const client = new DBSQLClient();
+      await client.connect({ host, path: httpPath, token });
+      try {
+        const session = await client.openSession();
+        try {
+          const operation = await session.executeStatement(sql);
+          const result = await operation.fetchAll();
+          await operation.close();
+          return { rows: result };
+        } finally {
+          await session.close();
+        }
+      } finally {
+        await client.close();
+      }
+    }
+  };
+}
+
 // src/connector-client/registry.ts
 function createConnectorRegistry() {
   let connectionsCache = null;
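All four new factories return the same minimal client contract the registry hands to SQL handlers: an object whose async query method resolves to { rows }. Each factory defers importing its driver SDK (mysql2, the AWS SDK clients, @databricks/sql) until the first query, and since Athena and the Redshift Data API are asynchronous, both of those clients poll execution status every 500 ms until a terminal state. A minimal sketch of the shared contract, with an illustrative type name that the untyped bundle does not actually export:

```typescript
// Sketch only: the bundle ships untyped JS; "ConnectorClient" is an assumed name.
interface ConnectorClient {
  query(sql: string, params?: unknown[]): Promise<{ rows: unknown[] }>;
}

// Any of the factories above can be consumed uniformly through this shape:
async function firstRow(client: ConnectorClient, sql: string): Promise<unknown> {
  const { rows } = await client.query(sql);
  return rows[0];
}
```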
@@ -112,7 +287,7 @@ function createConnectorRegistry() {
   function getConnectionsFilePath() {
     return process.env.CONNECTIONS_PATH ?? path.join(process.cwd(), "../../.squadbase/connections.json");
   }
-  function loadConnections() {
+  function loadConnections2() {
     if (connectionsCache !== null) return connectionsCache;
     const filePath = getConnectionsFilePath();
     try {
@@ -136,7 +311,7 @@ function createConnectorRegistry() {
   }
   const cached = clientCache.get(connectorSlug);
   if (cached) return cached;
-  const connections = loadConnections();
+  const connections = loadConnections2();
   const entry = connections[connectorSlug];
   if (!entry) {
     throw new Error(`connector slug '${connectorSlug}' not found in .squadbase/connections.json`);
@@ -153,6 +328,20 @@ function createConnectorRegistry() {
   if (resolvedType === "bigquery") {
     return createBigQueryClient(entry, connectorSlug);
   }
+  if (resolvedType === "athena") {
+    return createAthenaClient(entry, connectorSlug);
+  }
+  if (resolvedType === "redshift") {
+    return createRedshiftClient(entry, connectorSlug);
+  }
+  if (resolvedType === "databricks") {
+    return createDatabricksClient(entry, connectorSlug);
+  }
+  if (resolvedType === "mysql") {
+    const client = createMySQLClient(entry, connectorSlug);
+    clientCache.set(connectorSlug, client);
+    return client;
+  }
   if (resolvedType === "postgresql" || resolvedType === "squadbase-db") {
     const urlEnvName = entry.envVars["connection-url"];
     if (!urlEnvName) {
@@ -169,7 +358,7 @@ function createConnectorRegistry() {
     return client;
   }
   throw new Error(
-    `connector type '${resolvedType}' is not supported. Supported: "snowflake", "bigquery", "postgresql", "squadbase-db"`
+    `connector type '${resolvedType}' is not supported as a SQL connector. Supported SQL types: "postgresql", "squadbase-db", "mysql", "snowflake", "bigquery", "athena", "redshift", "databricks". Non-SQL types (airtable, google-analytics, kintone, wix-store, dbt) should be used via TypeScript handlers.`
   );
 }
 function reloadEnvFile2(envPath) {
@@ -201,11 +390,11 @@ function createConnectorRegistry() {
     } catch {
     }
   }
-  return { getClient: getClient2, reloadEnvFile: reloadEnvFile2, watchConnectionsFile: watchConnectionsFile2 };
+  return { getClient: getClient2, loadConnections: loadConnections2, reloadEnvFile: reloadEnvFile2, watchConnectionsFile: watchConnectionsFile2 };
 }
 
 // src/connector-client/index.ts
-var { getClient, reloadEnvFile, watchConnectionsFile } = createConnectorRegistry();
+var { getClient, loadConnections, reloadEnvFile, watchConnectionsFile } = createConnectorRegistry();
 
 // src/registry.ts
 var dataSources = /* @__PURE__ */ new Map();
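The registry now also exposes loadConnections, so the parsed .squadbase/connections.json map is reachable by consumers (the internal 2 suffix on loadConnections2 appears to be the bundler's rename, since loadConnections now names the destructured top-level export). Note that the dispatch above caches only the MySQL client, which owns a connection pool; Athena, Redshift, and Databricks clients are constructed on each getClient call. A hypothetical consumer, with the entry shape inferred from the connections[connectorSlug] and entry.envVars[key] lookups in the code above:

```typescript
// Hypothetical: assumes loadConnections is re-exported from the package root
// and that each entry carries { type, envVars } as the registry code implies.
import { loadConnections } from "@squadbase/vite-server";

const connections = loadConnections();
for (const [slug, entry] of Object.entries(connections)) {
  console.log(slug, entry.type, Object.keys(entry.envVars ?? {}));
}
```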
@@ -308,10 +497,10 @@ async function initialize() {
       cacheConfig: sqlDef.cache,
       handler: async (runtimeParams) => {
         const client = await getClient(sqlDef.connectorSlug, sqlDef.connectorType);
-        const isExternalConnector = sqlDef.connectorType === "snowflake" || sqlDef.connectorType === "bigquery";
+        const isLiteralConnector = sqlDef.connectorType === "snowflake" || sqlDef.connectorType === "bigquery" || sqlDef.connectorType === "athena" || sqlDef.connectorType === "redshift" || sqlDef.connectorType === "databricks";
         let queryText;
         let queryValues;
-        if (isExternalConnector) {
+        if (isLiteralConnector) {
           const defaults = new Map(
             (sqlDef.parameters ?? []).map((p) => [p.name, p.default ?? null])
           );
@@ -329,6 +518,14 @@ async function initialize() {
             }
           );
           queryValues = [];
+        } else if (sqlDef.connectorType === "mysql") {
+          const built = buildQuery(
+            sqlDef.query,
+            sqlDef.parameters ?? [],
+            runtimeParams
+          );
+          queryText = built.text.replace(/\$(\d+)/g, "?");
+          queryValues = built.values;
         } else {
           const built = buildQuery(
             sqlDef.query,
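For MySQL, the handler reuses the same buildQuery pipeline as PostgreSQL and then rewrites the pg-style numbered placeholders into the positional ? markers that mysql2 expects. A standalone reproduction of that rewrite, where the built object imitates what the calls above suggest buildQuery returns:

```typescript
// Sketch of the $n -> ? rewrite; `built` mirrors buildQuery's inferred output.
const built = {
  text: "SELECT * FROM users WHERE id = $1 AND name = $2",
  values: [42, "ada"],
};
const queryText = built.text.replace(/\$(\d+)/g, "?");
// -> "SELECT * FROM users WHERE id = ? AND name = ?"
const queryValues = built.values; // passed along to pool.execute(queryText, queryValues)
```

The rewrite preserves semantics only when placeholders appear in ascending order and none repeats, because ? markers bind strictly by position while $n placeholders bind by number.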
@@ -7,14 +7,14 @@ interface ParameterMeta {
 }
 interface DataSourceCacheConfig {
   /**
-   * キャッシュの有効期間(秒)。
-   * 0 または未指定の場合はキャッシュしない(後方互換性を保つデフォルト動作)。
+   * Cache TTL in seconds.
+   * 0 or unset means no caching (default behavior for backward compatibility).
    */
   ttl: number;
   /**
-   * true の場合、TTL 期限切れ後も古いデータを即座に返しつつ、
-   * バックグラウンドで新しいデータを非同期取得してキャッシュを更新する。
-   * デフォルト: false
+   * When true, stale data is returned immediately after TTL expiry
+   * while fresh data is fetched asynchronously in the background to update the cache.
+   * Default: false
    */
   staleWhileRevalidate?: boolean;
 }
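With the JSDoc now in English, the cache contract is easy to state: a ttl of 0 (or unset) disables caching, and staleWhileRevalidate trades freshness for latency after expiry. A minimal usage sketch, re-declaring the interface for self-containment:

```typescript
interface DataSourceCacheConfig {
  ttl: number;                    // seconds; 0 or unset disables caching
  staleWhileRevalidate?: boolean; // default: false
}

// Cache results for 5 minutes; after expiry, serve stale rows immediately
// while a background fetch refreshes the cache.
const cache: DataSourceCacheConfig = {
  ttl: 300,
  staleWhileRevalidate: true,
};
```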
@@ -2,6 +2,8 @@
 import buildPlugin from "@hono/vite-build/node";
 import devServer from "@hono/vite-dev-server";
 import nodeAdapter from "@hono/vite-dev-server/node";
+import { fileURLToPath } from "url";
+import path3 from "path";
 
 // src/registry.ts
 import { readdir, readFile, mkdir } from "fs/promises";
@@ -37,6 +39,11 @@ function resolveEnvVar(entry, key, slug) {
   }
   return value;
 }
+function resolveEnvVarOptional(entry, key) {
+  const envVarName = entry.envVars[key];
+  if (!envVarName) return void 0;
+  return process.env[envVarName] || void 0;
+}
 
 // src/connector-client/bigquery.ts
 function createBigQueryClient(entry, slug) {
@@ -105,6 +112,176 @@ function createSnowflakeClient(entry, slug) {
   };
 }
 
+// src/connector-client/mysql.ts
+function createMySQLClient(entry, slug) {
+  const connectionUrl = resolveEnvVar(entry, "connection-url", slug);
+  let poolPromise = null;
+  function getPool() {
+    if (!poolPromise) {
+      poolPromise = import("mysql2/promise").then(
+        (mysql) => mysql.default.createPool(connectionUrl)
+      );
+    }
+    return poolPromise;
+  }
+  return {
+    async query(sql, params) {
+      const pool = await getPool();
+      const [rows] = await pool.execute(sql, params);
+      return { rows };
+    }
+  };
+}
+
+// src/connector-client/aws-athena.ts
+function createAthenaClient(entry, slug) {
+  const region = resolveEnvVar(entry, "aws-region", slug);
+  const accessKeyId = resolveEnvVar(entry, "aws-access-key-id", slug);
+  const secretAccessKey = resolveEnvVar(entry, "aws-secret-access-key", slug);
+  const workgroup = resolveEnvVarOptional(entry, "workgroup") ?? "primary";
+  const outputLocation = resolveEnvVarOptional(entry, "output-location");
+  return {
+    async query(sql) {
+      const {
+        AthenaClient,
+        StartQueryExecutionCommand,
+        GetQueryExecutionCommand,
+        GetQueryResultsCommand
+      } = await import("@aws-sdk/client-athena");
+      const client = new AthenaClient({
+        region,
+        credentials: { accessKeyId, secretAccessKey }
+      });
+      const startParams = {
+        QueryString: sql,
+        WorkGroup: workgroup
+      };
+      if (outputLocation) {
+        startParams.ResultConfiguration = { OutputLocation: outputLocation };
+      }
+      const { QueryExecutionId } = await client.send(
+        new StartQueryExecutionCommand(startParams)
+      );
+      if (!QueryExecutionId) throw new Error("Athena: failed to start query execution");
+      while (true) {
+        const { QueryExecution } = await client.send(
+          new GetQueryExecutionCommand({ QueryExecutionId })
+        );
+        const state = QueryExecution?.Status?.State;
+        if (state === "SUCCEEDED") break;
+        if (state === "FAILED") {
+          throw new Error(
+            `Athena query failed: ${QueryExecution?.Status?.StateChangeReason ?? "unknown"}`
+          );
+        }
+        if (state === "CANCELLED") throw new Error("Athena query was cancelled");
+        await new Promise((r) => setTimeout(r, 500));
+      }
+      const { ResultSet } = await client.send(
+        new GetQueryResultsCommand({ QueryExecutionId })
+      );
+      const resultRows = ResultSet?.Rows ?? [];
+      if (resultRows.length === 0) return { rows: [] };
+      const headers = resultRows[0].Data?.map((d) => d.VarCharValue ?? "") ?? [];
+      const rows = resultRows.slice(1).map((row) => {
+        const obj = {};
+        row.Data?.forEach((d, i) => {
+          obj[headers[i]] = d.VarCharValue ?? null;
+        });
+        return obj;
+      });
+      return { rows };
+    }
+  };
+}
+
+// src/connector-client/redshift.ts
+function createRedshiftClient(entry, slug) {
+  const region = resolveEnvVar(entry, "aws-region", slug);
+  const accessKeyId = resolveEnvVar(entry, "aws-access-key-id", slug);
+  const secretAccessKey = resolveEnvVar(entry, "aws-secret-access-key", slug);
+  const database = resolveEnvVar(entry, "database", slug);
+  const clusterIdentifier = resolveEnvVarOptional(entry, "cluster-identifier");
+  const workgroupName = resolveEnvVarOptional(entry, "workgroup-name");
+  const secretArn = resolveEnvVarOptional(entry, "secret-arn");
+  const dbUser = resolveEnvVarOptional(entry, "db-user");
+  return {
+    async query(sql) {
+      const {
+        RedshiftDataClient,
+        ExecuteStatementCommand,
+        DescribeStatementCommand,
+        GetStatementResultCommand
+      } = await import("@aws-sdk/client-redshift-data");
+      const client = new RedshiftDataClient({
+        region,
+        credentials: { accessKeyId, secretAccessKey }
+      });
+      const executeParams = {
+        Sql: sql,
+        Database: database
+      };
+      if (clusterIdentifier) executeParams.ClusterIdentifier = clusterIdentifier;
+      if (workgroupName) executeParams.WorkgroupName = workgroupName;
+      if (secretArn) executeParams.SecretArn = secretArn;
+      if (dbUser) executeParams.DbUser = dbUser;
+      const { Id } = await client.send(
+        new ExecuteStatementCommand(executeParams)
+      );
+      if (!Id) throw new Error("Redshift: failed to start statement execution");
+      while (true) {
+        const desc = await client.send(new DescribeStatementCommand({ Id }));
+        const status = desc.Status;
+        if (status === "FINISHED") break;
+        if (status === "FAILED") {
+          throw new Error(`Redshift query failed: ${desc.Error ?? "unknown"}`);
+        }
+        if (status === "ABORTED") throw new Error("Redshift query was aborted");
+        await new Promise((r) => setTimeout(r, 500));
+      }
+      const result = await client.send(new GetStatementResultCommand({ Id }));
+      const columns = result.ColumnMetadata?.map((c) => c.name ?? "") ?? [];
+      const rows = (result.Records ?? []).map((record) => {
+        const obj = {};
+        record.forEach((field, i) => {
+          const col = columns[i];
+          const value = field.stringValue ?? field.longValue ?? field.doubleValue ?? field.booleanValue ?? (field.isNull ? null : field.blobValue ?? null);
+          obj[col] = value;
+        });
+        return obj;
+      });
+      return { rows };
+    }
+  };
+}
+
+// src/connector-client/databricks.ts
+function createDatabricksClient(entry, slug) {
+  const host = resolveEnvVar(entry, "host", slug);
+  const httpPath = resolveEnvVar(entry, "http-path", slug);
+  const token = resolveEnvVar(entry, "token", slug);
+  return {
+    async query(sql) {
+      const { DBSQLClient } = await import("@databricks/sql");
+      const client = new DBSQLClient();
+      await client.connect({ host, path: httpPath, token });
+      try {
+        const session = await client.openSession();
+        try {
+          const operation = await session.executeStatement(sql);
+          const result = await operation.fetchAll();
+          await operation.close();
+          return { rows: result };
+        } finally {
+          await session.close();
+        }
+      } finally {
+        await client.close();
+      }
+    }
+  };
+}
+
 // src/connector-client/registry.ts
 function createConnectorRegistry() {
   let connectionsCache = null;
@@ -112,7 +289,7 @@ function createConnectorRegistry() {
   function getConnectionsFilePath() {
     return process.env.CONNECTIONS_PATH ?? path.join(process.cwd(), "../../.squadbase/connections.json");
   }
-  function loadConnections() {
+  function loadConnections2() {
     if (connectionsCache !== null) return connectionsCache;
     const filePath = getConnectionsFilePath();
     try {
@@ -136,7 +313,7 @@ function createConnectorRegistry() {
   }
   const cached = clientCache.get(connectorSlug);
   if (cached) return cached;
-  const connections = loadConnections();
+  const connections = loadConnections2();
   const entry = connections[connectorSlug];
   if (!entry) {
     throw new Error(`connector slug '${connectorSlug}' not found in .squadbase/connections.json`);
@@ -153,6 +330,20 @@ function createConnectorRegistry() {
   if (resolvedType === "bigquery") {
     return createBigQueryClient(entry, connectorSlug);
   }
+  if (resolvedType === "athena") {
+    return createAthenaClient(entry, connectorSlug);
+  }
+  if (resolvedType === "redshift") {
+    return createRedshiftClient(entry, connectorSlug);
+  }
+  if (resolvedType === "databricks") {
+    return createDatabricksClient(entry, connectorSlug);
+  }
+  if (resolvedType === "mysql") {
+    const client = createMySQLClient(entry, connectorSlug);
+    clientCache.set(connectorSlug, client);
+    return client;
+  }
   if (resolvedType === "postgresql" || resolvedType === "squadbase-db") {
     const urlEnvName = entry.envVars["connection-url"];
     if (!urlEnvName) {
@@ -169,7 +360,7 @@ function createConnectorRegistry() {
     return client;
   }
   throw new Error(
-    `connector type '${resolvedType}' is not supported. Supported: "snowflake", "bigquery", "postgresql", "squadbase-db"`
+    `connector type '${resolvedType}' is not supported as a SQL connector. Supported SQL types: "postgresql", "squadbase-db", "mysql", "snowflake", "bigquery", "athena", "redshift", "databricks". Non-SQL types (airtable, google-analytics, kintone, wix-store, dbt) should be used via TypeScript handlers.`
   );
 }
 function reloadEnvFile2(envPath) {
@@ -201,11 +392,11 @@ function createConnectorRegistry() {
     } catch {
     }
   }
-  return { getClient: getClient2, reloadEnvFile: reloadEnvFile2, watchConnectionsFile: watchConnectionsFile2 };
+  return { getClient: getClient2, loadConnections: loadConnections2, reloadEnvFile: reloadEnvFile2, watchConnectionsFile: watchConnectionsFile2 };
 }
 
 // src/connector-client/index.ts
-var { getClient, reloadEnvFile, watchConnectionsFile } = createConnectorRegistry();
+var { getClient, loadConnections, reloadEnvFile, watchConnectionsFile } = createConnectorRegistry();
 
 // src/registry.ts
 var viteServer = null;
@@ -226,6 +417,17 @@ var DEFAULT_EXCLUDE = [
   /^\/node_modules\/.*/,
   /^(?!\/api)/
 ];
+function resolveEntry(entry) {
+  if (entry.startsWith(".") || entry.startsWith("/")) return entry;
+  try {
+    const resolvedUrl = import.meta.resolve(entry);
+    const absolutePath = fileURLToPath(resolvedUrl);
+    const relativePath = path3.relative(process.cwd(), absolutePath).replace(/\\/g, "/");
+    return "./" + relativePath;
+  } catch {
+    return entry;
+  }
+}
 function squadbasePlugin(options = {}) {
   const {
     buildEntry = "@squadbase/vite-server/main",
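resolveEntry is what lets the default buildEntry, the bare specifier "@squadbase/vite-server/main", work as a build input: import.meta.resolve maps the specifier to a file: URL, fileURLToPath converts that to an absolute path, and path3.relative plus the backslash replacement yields a ./-prefixed, forward-slash path that also behaves on Windows. An illustrative trace, with made-up paths:

```typescript
// Illustrative walk-through of resolveEntry("@squadbase/vite-server/main").
import { fileURLToPath } from "url";
import path from "path";

const resolvedUrl = import.meta.resolve("@squadbase/vite-server/main");
// e.g. "file:///proj/node_modules/@squadbase/vite-server/dist/main.js"
const absolutePath = fileURLToPath(resolvedUrl);
const entry = "./" + path.relative(process.cwd(), absolutePath).replace(/\\/g, "/");
// e.g. "./node_modules/@squadbase/vite-server/dist/main.js"
```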
@@ -236,7 +438,7 @@ function squadbasePlugin(options = {}) {
   } = options;
   const isServerBuild = (_, { command, mode }) => command === "build" && mode !== "client";
   const rawBuildPlugin = buildPlugin({
-    entry: buildEntry,
+    entry: resolveEntry(buildEntry),
     outputDir: "./dist/server",
     output: "index.js",
     port,
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@squadbase/vite-server",
-  "version": "0.0.1-build-1",
+  "version": "0.0.1-build-3",
   "type": "module",
   "exports": {
     ".": {
@@ -30,14 +30,21 @@
   "scripts": {
     "cli": "tsx src/cli/index.ts",
     "build": "tsup && pnpm run build:cli",
-    "build:cli": "tsup src/cli/index.ts --out-dir dist/cli --format esm --platform node --no-splitting --external pg --external snowflake-sdk --external @google-cloud/bigquery --external hono --external @clack/prompts"
+    "build:cli": "tsup src/cli/index.ts --out-dir dist/cli --format esm --platform node --no-splitting --external pg --external snowflake-sdk --external @google-cloud/bigquery --external mysql2 --external @aws-sdk/client-athena --external @aws-sdk/client-redshift-data --external @databricks/sql --external @google-analytics/data --external @kintone/rest-api-client --external hono --external @clack/prompts"
   },
   "dependencies": {
+    "@aws-sdk/client-athena": "^3.750.0",
+    "@aws-sdk/client-redshift-data": "^3.750.0",
+    "@databricks/sql": "^1.8.0",
+
+    "@google-analytics/data": "^4.8.0",
     "@google-cloud/bigquery": "^7.9.4",
     "@hono/node-server": "^1.19.9",
     "@hono/vite-build": "^1.10.0",
     "@hono/vite-dev-server": "^0.18.0",
+    "@kintone/rest-api-client": "^5.5.0",
     "hono": "^4.11.9",
+    "mysql2": "^3.11.0",
     "pg": "^8.18.0",
     "snowflake-sdk": "^1.15.0"
   },
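Each new driver SDK appears twice: as a runtime dependency and as an --external flag in build:cli, so tsup leaves those imports unbundled and the dynamic import(...) calls in the connector clients load each SDK from node_modules only when that connector type is actually used. The same flag list could live in a config file; a hypothetical tsup.config.ts equivalent:

```typescript
// Hypothetical tsup.config.ts mirroring the build:cli flags above.
import { defineConfig } from "tsup";

export default defineConfig({
  entry: ["src/cli/index.ts"],
  outDir: "dist/cli",
  format: ["esm"],
  platform: "node",
  splitting: false,
  external: [
    "pg", "snowflake-sdk", "@google-cloud/bigquery", "mysql2",
    "@aws-sdk/client-athena", "@aws-sdk/client-redshift-data",
    "@databricks/sql", "@google-analytics/data",
    "@kintone/rest-api-client", "hono", "@clack/prompts",
  ],
});
```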