@squadbase/vite-server 0.0.1-build-3 → 0.0.1-build-4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/main.js CHANGED
@@ -37,11 +37,6 @@ function resolveEnvVar(entry, key, slug) {
  }
  return value;
  }
- function resolveEnvVarOptional(entry, key) {
- const envVarName = entry.envVars[key];
- if (!envVarName) return void 0;
- return process.env[envVarName] || void 0;
- }
 
  // src/connector-client/bigquery.ts
  function createBigQueryClient(entry, slug) {
@@ -110,176 +105,6 @@ function createSnowflakeClient(entry, slug) {
  };
  }
 
- // src/connector-client/mysql.ts
- function createMySQLClient(entry, slug) {
- const connectionUrl = resolveEnvVar(entry, "connection-url", slug);
- let poolPromise = null;
- function getPool() {
- if (!poolPromise) {
- poolPromise = import("mysql2/promise").then(
- (mysql) => mysql.default.createPool(connectionUrl)
- );
- }
- return poolPromise;
- }
- return {
- async query(sql, params) {
- const pool = await getPool();
- const [rows] = await pool.execute(sql, params);
- return { rows };
- }
- };
- }
-
- // src/connector-client/aws-athena.ts
- function createAthenaClient(entry, slug) {
- const region = resolveEnvVar(entry, "aws-region", slug);
- const accessKeyId = resolveEnvVar(entry, "aws-access-key-id", slug);
- const secretAccessKey = resolveEnvVar(entry, "aws-secret-access-key", slug);
- const workgroup = resolveEnvVarOptional(entry, "workgroup") ?? "primary";
- const outputLocation = resolveEnvVarOptional(entry, "output-location");
- return {
- async query(sql) {
- const {
- AthenaClient,
- StartQueryExecutionCommand,
- GetQueryExecutionCommand,
- GetQueryResultsCommand
- } = await import("@aws-sdk/client-athena");
- const client = new AthenaClient({
- region,
- credentials: { accessKeyId, secretAccessKey }
- });
- const startParams = {
- QueryString: sql,
- WorkGroup: workgroup
- };
- if (outputLocation) {
- startParams.ResultConfiguration = { OutputLocation: outputLocation };
- }
- const { QueryExecutionId } = await client.send(
- new StartQueryExecutionCommand(startParams)
- );
- if (!QueryExecutionId) throw new Error("Athena: failed to start query execution");
- while (true) {
- const { QueryExecution } = await client.send(
- new GetQueryExecutionCommand({ QueryExecutionId })
- );
- const state = QueryExecution?.Status?.State;
- if (state === "SUCCEEDED") break;
- if (state === "FAILED") {
- throw new Error(
- `Athena query failed: ${QueryExecution?.Status?.StateChangeReason ?? "unknown"}`
- );
- }
- if (state === "CANCELLED") throw new Error("Athena query was cancelled");
- await new Promise((r) => setTimeout(r, 500));
- }
- const { ResultSet } = await client.send(
- new GetQueryResultsCommand({ QueryExecutionId })
- );
- const resultRows = ResultSet?.Rows ?? [];
- if (resultRows.length === 0) return { rows: [] };
- const headers = resultRows[0].Data?.map((d) => d.VarCharValue ?? "") ?? [];
- const rows = resultRows.slice(1).map((row) => {
- const obj = {};
- row.Data?.forEach((d, i) => {
- obj[headers[i]] = d.VarCharValue ?? null;
- });
- return obj;
- });
- return { rows };
- }
- };
- }
-
- // src/connector-client/redshift.ts
- function createRedshiftClient(entry, slug) {
- const region = resolveEnvVar(entry, "aws-region", slug);
- const accessKeyId = resolveEnvVar(entry, "aws-access-key-id", slug);
- const secretAccessKey = resolveEnvVar(entry, "aws-secret-access-key", slug);
- const database = resolveEnvVar(entry, "database", slug);
- const clusterIdentifier = resolveEnvVarOptional(entry, "cluster-identifier");
- const workgroupName = resolveEnvVarOptional(entry, "workgroup-name");
- const secretArn = resolveEnvVarOptional(entry, "secret-arn");
- const dbUser = resolveEnvVarOptional(entry, "db-user");
- return {
- async query(sql) {
- const {
- RedshiftDataClient,
- ExecuteStatementCommand,
- DescribeStatementCommand,
- GetStatementResultCommand
- } = await import("@aws-sdk/client-redshift-data");
- const client = new RedshiftDataClient({
- region,
- credentials: { accessKeyId, secretAccessKey }
- });
- const executeParams = {
- Sql: sql,
- Database: database
- };
- if (clusterIdentifier) executeParams.ClusterIdentifier = clusterIdentifier;
- if (workgroupName) executeParams.WorkgroupName = workgroupName;
- if (secretArn) executeParams.SecretArn = secretArn;
- if (dbUser) executeParams.DbUser = dbUser;
- const { Id } = await client.send(
- new ExecuteStatementCommand(executeParams)
- );
- if (!Id) throw new Error("Redshift: failed to start statement execution");
- while (true) {
- const desc = await client.send(new DescribeStatementCommand({ Id }));
- const status = desc.Status;
- if (status === "FINISHED") break;
- if (status === "FAILED") {
- throw new Error(`Redshift query failed: ${desc.Error ?? "unknown"}`);
- }
- if (status === "ABORTED") throw new Error("Redshift query was aborted");
- await new Promise((r) => setTimeout(r, 500));
- }
- const result = await client.send(new GetStatementResultCommand({ Id }));
- const columns = result.ColumnMetadata?.map((c) => c.name ?? "") ?? [];
- const rows = (result.Records ?? []).map((record) => {
- const obj = {};
- record.forEach((field, i) => {
- const col = columns[i];
- const value = field.stringValue ?? field.longValue ?? field.doubleValue ?? field.booleanValue ?? (field.isNull ? null : field.blobValue ?? null);
- obj[col] = value;
- });
- return obj;
- });
- return { rows };
- }
- };
- }
-
- // src/connector-client/databricks.ts
- function createDatabricksClient(entry, slug) {
- const host = resolveEnvVar(entry, "host", slug);
- const httpPath = resolveEnvVar(entry, "http-path", slug);
- const token = resolveEnvVar(entry, "token", slug);
- return {
- async query(sql) {
- const { DBSQLClient } = await import("@databricks/sql");
- const client = new DBSQLClient();
- await client.connect({ host, path: httpPath, token });
- try {
- const session = await client.openSession();
- try {
- const operation = await session.executeStatement(sql);
- const result = await operation.fetchAll();
- await operation.close();
- return { rows: result };
- } finally {
- await session.close();
- }
- } finally {
- await client.close();
- }
- }
- };
- }
-
  // src/connector-client/registry.ts
  function createConnectorRegistry() {
  let connectionsCache = null;
@@ -287,7 +112,7 @@ function createConnectorRegistry() {
  function getConnectionsFilePath() {
  return process.env.CONNECTIONS_PATH ?? path.join(process.cwd(), "../../.squadbase/connections.json");
  }
- function loadConnections2() {
+ function loadConnections() {
  if (connectionsCache !== null) return connectionsCache;
  const filePath = getConnectionsFilePath();
  try {
@@ -311,7 +136,7 @@ function createConnectorRegistry() {
  }
  const cached = clientCache.get(connectorSlug);
  if (cached) return cached;
- const connections = loadConnections2();
+ const connections = loadConnections();
  const entry = connections[connectorSlug];
  if (!entry) {
  throw new Error(`connector slug '${connectorSlug}' not found in .squadbase/connections.json`);
@@ -328,20 +153,6 @@ function createConnectorRegistry() {
  if (resolvedType === "bigquery") {
  return createBigQueryClient(entry, connectorSlug);
  }
- if (resolvedType === "athena") {
- return createAthenaClient(entry, connectorSlug);
- }
- if (resolvedType === "redshift") {
- return createRedshiftClient(entry, connectorSlug);
- }
- if (resolvedType === "databricks") {
- return createDatabricksClient(entry, connectorSlug);
- }
- if (resolvedType === "mysql") {
- const client = createMySQLClient(entry, connectorSlug);
- clientCache.set(connectorSlug, client);
- return client;
- }
  if (resolvedType === "postgresql" || resolvedType === "squadbase-db") {
  const urlEnvName = entry.envVars["connection-url"];
  if (!urlEnvName) {
@@ -358,7 +169,7 @@ function createConnectorRegistry() {
  return client;
  }
  throw new Error(
- `connector type '${resolvedType}' is not supported as a SQL connector. Supported SQL types: "postgresql", "squadbase-db", "mysql", "snowflake", "bigquery", "athena", "redshift", "databricks". Non-SQL types (airtable, google-analytics, kintone, wix-store, dbt) should be used via TypeScript handlers.`
+ `connector type '${resolvedType}' is not supported. Supported: "snowflake", "bigquery", "postgresql", "squadbase-db"`
  );
  }
  function reloadEnvFile2(envPath) {
@@ -390,14 +201,15 @@ function createConnectorRegistry() {
  } catch {
  }
  }
- return { getClient: getClient2, loadConnections: loadConnections2, reloadEnvFile: reloadEnvFile2, watchConnectionsFile: watchConnectionsFile2 };
+ return { getClient: getClient2, reloadEnvFile: reloadEnvFile2, watchConnectionsFile: watchConnectionsFile2 };
  }
 
  // src/connector-client/index.ts
- var { getClient, loadConnections, reloadEnvFile, watchConnectionsFile } = createConnectorRegistry();
+ var { getClient, reloadEnvFile, watchConnectionsFile } = createConnectorRegistry();
 
  // src/registry.ts
  var dataSources = /* @__PURE__ */ new Map();
+ var currentDirPath = "";
  var viteServer = null;
  function validateHandlerPath(dirPath, handlerPath) {
  const absolute = path2.resolve(dirPath, handlerPath);
@@ -452,6 +264,7 @@ async function initialize() {
  );
  dataSources.clear();
  const dirPath = process.env.DATA_SOURCE_DIR || defaultDataSourceDir;
+ currentDirPath = dirPath;
  await mkdir(dirPath, { recursive: true });
  const files = await readdir(dirPath);
  const jsonFiles = files.filter((f) => f.endsWith(".json"));
@@ -495,12 +308,14 @@ async function initialize() {
  response: sqlDef.response,
  connectorSlug: sqlDef.connectorSlug,
  cacheConfig: sqlDef.cache,
+ _query: sqlDef.query,
+ _connectorType: sqlDef.connectorType,
  handler: async (runtimeParams) => {
  const client = await getClient(sqlDef.connectorSlug, sqlDef.connectorType);
- const isLiteralConnector = sqlDef.connectorType === "snowflake" || sqlDef.connectorType === "bigquery" || sqlDef.connectorType === "athena" || sqlDef.connectorType === "redshift" || sqlDef.connectorType === "databricks";
+ const isExternalConnector = sqlDef.connectorType === "snowflake" || sqlDef.connectorType === "bigquery";
  let queryText;
  let queryValues;
- if (isLiteralConnector) {
+ if (isExternalConnector) {
  const defaults = new Map(
  (sqlDef.parameters ?? []).map((p) => [p.name, p.default ?? null])
  );
@@ -518,14 +333,6 @@ async function initialize() {
  }
  );
  queryValues = [];
- } else if (sqlDef.connectorType === "mysql") {
- const built = buildQuery(
- sqlDef.query,
- sqlDef.parameters ?? [],
- runtimeParams
- );
- queryText = built.text.replace(/\$(\d+)/g, "?");
- queryValues = built.values;
  } else {
  const built = buildQuery(
  sqlDef.query,
@@ -576,25 +383,29 @@ function startWatching() {
  function getDataSource(slug) {
  return dataSources.get(slug);
  }
- function getAllMeta() {
- return Array.from(dataSources.entries()).map(([slug, def]) => ({
+ function buildMeta(slug, def) {
+ return {
  slug,
  description: def.description,
+ type: def._isTypescript ? "typescript" : "sql",
  parameters: def.parameters,
  response: def.response,
- connectorSlug: def.connectorSlug
- }));
+ query: def._query,
+ connectorType: def._connectorType,
+ connectorSlug: def.connectorSlug,
+ handlerPath: def._tsHandlerPath ? path2.relative(currentDirPath, def._tsHandlerPath) : void 0,
+ cache: def.cacheConfig
+ };
+ }
+ function getAllMeta() {
+ return Array.from(dataSources.entries()).map(
+ ([slug, def]) => buildMeta(slug, def)
+ );
  }
  function getMeta(slug) {
  const def = dataSources.get(slug);
  if (!def) return void 0;
- return {
- slug,
- description: def.description,
- parameters: def.parameters,
- response: def.response,
- connectorSlug: def.connectorSlug
- };
+ return buildMeta(slug, def);
  }
 
  // src/routes/data-source.ts
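After these hunks, the SQL connector surface of main.js narrows to Snowflake, BigQuery, PostgreSQL, and squadbase-db. For orientation, a minimal consumer sketch of the remaining registry API follows; `getClient` and the `{ rows }` result shape are taken from the bundled code above, while the import path, the connector slug, and the SQL text are assumptions for illustration:

// Hedged sketch: assumes the package re-exports getClient; the slug
// "sales-db" and the query are hypothetical, not from the package.
import { getClient } from "@squadbase/vite-server";

async function main(): Promise<void> {
  // The connector type must now be one of "snowflake", "bigquery",
  // "postgresql", or "squadbase-db"; anything else throws.
  const client = await getClient("sales-db", "postgresql");
  // Clients resolve credentials from .squadbase/connections.json via
  // env vars and expose query(sql, params) returning { rows }.
  const { rows } = await client.query("SELECT 1 AS ok", []);
  console.log(rows);
}

main();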
@@ -7,14 +7,14 @@ interface ParameterMeta {
  }
  interface DataSourceCacheConfig {
  /**
- * Cache TTL in seconds.
- * 0 or unset means no caching (default behavior for backward compatibility).
+ * Cache TTL in seconds.
+ * 0 or unset disables caching (the default, preserving backward compatibility).
  */
  ttl: number;
  /**
- * When true, stale data is returned immediately after TTL expiry
- * while fresh data is fetched asynchronously in the background to update the cache.
- * Default: false
+ * When true, stale data is returned immediately even after the TTL expires,
+ * while fresh data is fetched asynchronously in the background to update the cache.
+ * Default: false
  */
  staleWhileRevalidate?: boolean;
  }
@@ -52,13 +52,20 @@ interface DataSourceDefinition {
  handler: (params: Record<string, unknown>) => Promise<unknown> | unknown;
  _isTypescript?: boolean;
  _tsHandlerPath?: string;
+ _query?: string;
+ _connectorType?: string;
  }
  interface DataSourceMeta {
  slug: string;
  description: string;
+ type: "sql" | "typescript";
  parameters: ParameterMeta[];
  response?: DataSourceResponse;
+ query?: string;
+ connectorType?: string;
  connectorSlug?: string;
+ handlerPath?: string;
+ cache?: DataSourceCacheConfig;
  }
  interface JsonDataSourceDefinition {
  description: string;
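The interface changes above mirror the buildMeta rewrite in main.js: metadata lookups via getMeta/getAllMeta now also expose the query text, connector type, handler path, and cache settings. A sketch of a value conforming to the widened interface; field names and types come from DataSourceMeta above, but every concrete value is invented for illustration:

// Hypothetical metadata entry under the new DataSourceMeta shape.
const exampleMeta: DataSourceMeta = {
  slug: "orders-by-day",              // invented slug
  description: "Daily order counts",
  type: "sql",                        // "sql" | "typescript"
  parameters: [],
  query: "SELECT day, count(*) FROM orders GROUP BY day",
  connectorType: "postgresql",
  connectorSlug: "main-db",           // invented connector slug
  cache: { ttl: 60, staleWhileRevalidate: true }
  // handlerPath is set only for TypeScript-handler sources.
};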
@@ -2,8 +2,6 @@
  import buildPlugin from "@hono/vite-build/node";
  import devServer from "@hono/vite-dev-server";
  import nodeAdapter from "@hono/vite-dev-server/node";
- import { fileURLToPath } from "url";
- import path3 from "path";
 
  // src/registry.ts
  import { readdir, readFile, mkdir } from "fs/promises";
@@ -39,11 +37,6 @@ function resolveEnvVar(entry, key, slug) {
  }
  return value;
  }
- function resolveEnvVarOptional(entry, key) {
- const envVarName = entry.envVars[key];
- if (!envVarName) return void 0;
- return process.env[envVarName] || void 0;
- }
 
  // src/connector-client/bigquery.ts
  function createBigQueryClient(entry, slug) {
@@ -112,176 +105,6 @@ function createSnowflakeClient(entry, slug) {
  };
  }
 
- // src/connector-client/mysql.ts
- function createMySQLClient(entry, slug) {
- const connectionUrl = resolveEnvVar(entry, "connection-url", slug);
- let poolPromise = null;
- function getPool() {
- if (!poolPromise) {
- poolPromise = import("mysql2/promise").then(
- (mysql) => mysql.default.createPool(connectionUrl)
- );
- }
- return poolPromise;
- }
- return {
- async query(sql, params) {
- const pool = await getPool();
- const [rows] = await pool.execute(sql, params);
- return { rows };
- }
- };
- }
-
- // src/connector-client/aws-athena.ts
- function createAthenaClient(entry, slug) {
- const region = resolveEnvVar(entry, "aws-region", slug);
- const accessKeyId = resolveEnvVar(entry, "aws-access-key-id", slug);
- const secretAccessKey = resolveEnvVar(entry, "aws-secret-access-key", slug);
- const workgroup = resolveEnvVarOptional(entry, "workgroup") ?? "primary";
- const outputLocation = resolveEnvVarOptional(entry, "output-location");
- return {
- async query(sql) {
- const {
- AthenaClient,
- StartQueryExecutionCommand,
- GetQueryExecutionCommand,
- GetQueryResultsCommand
- } = await import("@aws-sdk/client-athena");
- const client = new AthenaClient({
- region,
- credentials: { accessKeyId, secretAccessKey }
- });
- const startParams = {
- QueryString: sql,
- WorkGroup: workgroup
- };
- if (outputLocation) {
- startParams.ResultConfiguration = { OutputLocation: outputLocation };
- }
- const { QueryExecutionId } = await client.send(
- new StartQueryExecutionCommand(startParams)
- );
- if (!QueryExecutionId) throw new Error("Athena: failed to start query execution");
- while (true) {
- const { QueryExecution } = await client.send(
- new GetQueryExecutionCommand({ QueryExecutionId })
- );
- const state = QueryExecution?.Status?.State;
- if (state === "SUCCEEDED") break;
- if (state === "FAILED") {
- throw new Error(
- `Athena query failed: ${QueryExecution?.Status?.StateChangeReason ?? "unknown"}`
- );
- }
- if (state === "CANCELLED") throw new Error("Athena query was cancelled");
- await new Promise((r) => setTimeout(r, 500));
- }
- const { ResultSet } = await client.send(
- new GetQueryResultsCommand({ QueryExecutionId })
- );
- const resultRows = ResultSet?.Rows ?? [];
- if (resultRows.length === 0) return { rows: [] };
- const headers = resultRows[0].Data?.map((d) => d.VarCharValue ?? "") ?? [];
- const rows = resultRows.slice(1).map((row) => {
- const obj = {};
- row.Data?.forEach((d, i) => {
- obj[headers[i]] = d.VarCharValue ?? null;
- });
- return obj;
- });
- return { rows };
- }
- };
- }
-
- // src/connector-client/redshift.ts
- function createRedshiftClient(entry, slug) {
- const region = resolveEnvVar(entry, "aws-region", slug);
- const accessKeyId = resolveEnvVar(entry, "aws-access-key-id", slug);
- const secretAccessKey = resolveEnvVar(entry, "aws-secret-access-key", slug);
- const database = resolveEnvVar(entry, "database", slug);
- const clusterIdentifier = resolveEnvVarOptional(entry, "cluster-identifier");
- const workgroupName = resolveEnvVarOptional(entry, "workgroup-name");
- const secretArn = resolveEnvVarOptional(entry, "secret-arn");
- const dbUser = resolveEnvVarOptional(entry, "db-user");
- return {
- async query(sql) {
- const {
- RedshiftDataClient,
- ExecuteStatementCommand,
- DescribeStatementCommand,
- GetStatementResultCommand
- } = await import("@aws-sdk/client-redshift-data");
- const client = new RedshiftDataClient({
- region,
- credentials: { accessKeyId, secretAccessKey }
- });
- const executeParams = {
- Sql: sql,
- Database: database
- };
- if (clusterIdentifier) executeParams.ClusterIdentifier = clusterIdentifier;
- if (workgroupName) executeParams.WorkgroupName = workgroupName;
- if (secretArn) executeParams.SecretArn = secretArn;
- if (dbUser) executeParams.DbUser = dbUser;
- const { Id } = await client.send(
- new ExecuteStatementCommand(executeParams)
- );
- if (!Id) throw new Error("Redshift: failed to start statement execution");
- while (true) {
- const desc = await client.send(new DescribeStatementCommand({ Id }));
- const status = desc.Status;
- if (status === "FINISHED") break;
- if (status === "FAILED") {
- throw new Error(`Redshift query failed: ${desc.Error ?? "unknown"}`);
- }
- if (status === "ABORTED") throw new Error("Redshift query was aborted");
- await new Promise((r) => setTimeout(r, 500));
- }
- const result = await client.send(new GetStatementResultCommand({ Id }));
- const columns = result.ColumnMetadata?.map((c) => c.name ?? "") ?? [];
- const rows = (result.Records ?? []).map((record) => {
- const obj = {};
- record.forEach((field, i) => {
- const col = columns[i];
- const value = field.stringValue ?? field.longValue ?? field.doubleValue ?? field.booleanValue ?? (field.isNull ? null : field.blobValue ?? null);
- obj[col] = value;
- });
- return obj;
- });
- return { rows };
- }
- };
- }
-
- // src/connector-client/databricks.ts
- function createDatabricksClient(entry, slug) {
- const host = resolveEnvVar(entry, "host", slug);
- const httpPath = resolveEnvVar(entry, "http-path", slug);
- const token = resolveEnvVar(entry, "token", slug);
- return {
- async query(sql) {
- const { DBSQLClient } = await import("@databricks/sql");
- const client = new DBSQLClient();
- await client.connect({ host, path: httpPath, token });
- try {
- const session = await client.openSession();
- try {
- const operation = await session.executeStatement(sql);
- const result = await operation.fetchAll();
- await operation.close();
- return { rows: result };
- } finally {
- await session.close();
- }
- } finally {
- await client.close();
- }
- }
- };
- }
-
  // src/connector-client/registry.ts
  function createConnectorRegistry() {
  let connectionsCache = null;
@@ -289,7 +112,7 @@ function createConnectorRegistry() {
  function getConnectionsFilePath() {
  return process.env.CONNECTIONS_PATH ?? path.join(process.cwd(), "../../.squadbase/connections.json");
  }
- function loadConnections2() {
+ function loadConnections() {
  if (connectionsCache !== null) return connectionsCache;
  const filePath = getConnectionsFilePath();
  try {
@@ -313,7 +136,7 @@ function createConnectorRegistry() {
  }
  const cached = clientCache.get(connectorSlug);
  if (cached) return cached;
- const connections = loadConnections2();
+ const connections = loadConnections();
  const entry = connections[connectorSlug];
  if (!entry) {
  throw new Error(`connector slug '${connectorSlug}' not found in .squadbase/connections.json`);
@@ -330,20 +153,6 @@ function createConnectorRegistry() {
  if (resolvedType === "bigquery") {
  return createBigQueryClient(entry, connectorSlug);
  }
- if (resolvedType === "athena") {
- return createAthenaClient(entry, connectorSlug);
- }
- if (resolvedType === "redshift") {
- return createRedshiftClient(entry, connectorSlug);
- }
- if (resolvedType === "databricks") {
- return createDatabricksClient(entry, connectorSlug);
- }
- if (resolvedType === "mysql") {
- const client = createMySQLClient(entry, connectorSlug);
- clientCache.set(connectorSlug, client);
- return client;
- }
  if (resolvedType === "postgresql" || resolvedType === "squadbase-db") {
  const urlEnvName = entry.envVars["connection-url"];
  if (!urlEnvName) {
@@ -360,7 +169,7 @@ function createConnectorRegistry() {
  return client;
  }
  throw new Error(
- `connector type '${resolvedType}' is not supported as a SQL connector. Supported SQL types: "postgresql", "squadbase-db", "mysql", "snowflake", "bigquery", "athena", "redshift", "databricks". Non-SQL types (airtable, google-analytics, kintone, wix-store, dbt) should be used via TypeScript handlers.`
+ `connector type '${resolvedType}' is not supported. Supported: "snowflake", "bigquery", "postgresql", "squadbase-db"`
  );
  }
  function reloadEnvFile2(envPath) {
@@ -392,11 +201,11 @@ function createConnectorRegistry() {
  } catch {
  }
  }
- return { getClient: getClient2, loadConnections: loadConnections2, reloadEnvFile: reloadEnvFile2, watchConnectionsFile: watchConnectionsFile2 };
+ return { getClient: getClient2, reloadEnvFile: reloadEnvFile2, watchConnectionsFile: watchConnectionsFile2 };
  }
 
  // src/connector-client/index.ts
- var { getClient, loadConnections, reloadEnvFile, watchConnectionsFile } = createConnectorRegistry();
+ var { getClient, reloadEnvFile, watchConnectionsFile } = createConnectorRegistry();
 
  // src/registry.ts
  var viteServer = null;
@@ -417,17 +226,6 @@ var DEFAULT_EXCLUDE = [
  /^\/node_modules\/.*/,
  /^(?!\/api)/
  ];
- function resolveEntry(entry) {
- if (entry.startsWith(".") || entry.startsWith("/")) return entry;
- try {
- const resolvedUrl = import.meta.resolve(entry);
- const absolutePath = fileURLToPath(resolvedUrl);
- const relativePath = path3.relative(process.cwd(), absolutePath).replace(/\\/g, "/");
- return "./" + relativePath;
- } catch {
- return entry;
- }
- }
  function squadbasePlugin(options = {}) {
  const {
  buildEntry = "@squadbase/vite-server/main",
@@ -438,7 +236,7 @@ function squadbasePlugin(options = {}) {
  } = options;
  const isServerBuild = (_, { command, mode }) => command === "build" && mode !== "client";
  const rawBuildPlugin = buildPlugin({
- entry: resolveEntry(buildEntry),
+ entry: buildEntry,
  outputDir: "./dist/server",
  output: "index.js",
  port,
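With resolveEntry gone, buildEntry is handed to @hono/vite-build verbatim instead of being pre-resolved via import.meta.resolve. A hypothetical vite.config.ts using the plugin under that assumption; squadbasePlugin appears in the bundle above, but its public import path is not shown in this diff and is assumed here:

// vite.config.ts — hedged sketch, not the package's documented setup.
import { defineConfig } from "vite";
import { squadbasePlugin } from "@squadbase/vite-server";

export default defineConfig({
  plugins: [
    squadbasePlugin({
      // Passed straight through to @hono/vite-build's `entry`, so bare
      // specifiers like this default must be resolvable by the bundler.
      buildEntry: "@squadbase/vite-server/main"
    })
  ]
});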
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@squadbase/vite-server",
- "version": "0.0.1-build-3",
+ "version": "0.0.1-build-4",
  "type": "module",
  "exports": {
  ".": {
@@ -30,21 +30,14 @@
  "scripts": {
  "cli": "tsx src/cli/index.ts",
  "build": "tsup && pnpm run build:cli",
- "build:cli": "tsup src/cli/index.ts --out-dir dist/cli --format esm --platform node --no-splitting --external pg --external snowflake-sdk --external @google-cloud/bigquery --external mysql2 --external @aws-sdk/client-athena --external @aws-sdk/client-redshift-data --external @databricks/sql --external @google-analytics/data --external @kintone/rest-api-client --external hono --external @clack/prompts"
+ "build:cli": "tsup src/cli/index.ts --out-dir dist/cli --format esm --platform node --no-splitting --external pg --external snowflake-sdk --external @google-cloud/bigquery --external hono --external @clack/prompts"
  },
  "dependencies": {
- "@aws-sdk/client-athena": "^3.750.0",
- "@aws-sdk/client-redshift-data": "^3.750.0",
- "@databricks/sql": "^1.8.0",
-
- "@google-analytics/data": "^4.8.0",
  "@google-cloud/bigquery": "^7.9.4",
  "@hono/node-server": "^1.19.9",
  "@hono/vite-build": "^1.10.0",
  "@hono/vite-dev-server": "^0.18.0",
- "@kintone/rest-api-client": "^5.5.0",
  "hono": "^4.11.9",
- "mysql2": "^3.11.0",
  "pg": "^8.18.0",
  "snowflake-sdk": "^1.15.0"
  },