@squadbase/vite-server 0.0.1-build-3 → 0.0.1-build-5

This diff shows the contents of publicly released versions of this package as published to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registry.
package/dist/main.js CHANGED
@@ -26,34 +26,29 @@ function createPostgreSQLClient(connectionString) {
  }
 
  // src/connector-client/env.ts
- function resolveEnvVar(entry, key, slug) {
+ function resolveEnvVar(entry, key, connectionId) {
  const envVarName = entry.envVars[key];
  if (!envVarName) {
- throw new Error(`Connector "${slug}" is missing envVars mapping for key "${key}"`);
+ throw new Error(`Connection "${connectionId}" is missing envVars mapping for key "${key}"`);
  }
  const value = process.env[envVarName];
  if (!value) {
- throw new Error(`Environment variable "${envVarName}" (for "${slug}.${key}") is not set`);
+ throw new Error(`Environment variable "${envVarName}" (for connection "${connectionId}", key "${key}") is not set`);
  }
  return value;
  }
- function resolveEnvVarOptional(entry, key) {
- const envVarName = entry.envVars[key];
- if (!envVarName) return void 0;
- return process.env[envVarName] || void 0;
- }
 
  // src/connector-client/bigquery.ts
- function createBigQueryClient(entry, slug) {
- const projectId = resolveEnvVar(entry, "project-id", slug);
- const serviceAccountJsonBase64 = resolveEnvVar(entry, "service-account-json-base64", slug);
+ function createBigQueryClient(entry, connectionId) {
+ const projectId = resolveEnvVar(entry, "project-id", connectionId);
+ const serviceAccountJsonBase64 = resolveEnvVar(entry, "service-account-json-base64", connectionId);
  const serviceAccountJson = Buffer.from(serviceAccountJsonBase64, "base64").toString("utf-8");
  let gcpCredentials;
  try {
  gcpCredentials = JSON.parse(serviceAccountJson);
  } catch {
  throw new Error(
- `BigQuery service account JSON (decoded from base64) is not valid JSON for slug "${slug}"`
+ `BigQuery service account JSON (decoded from base64) is not valid JSON for connection "${connectionId}"`
  );
  }
  return {
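A minimal sketch of the .squadbase/connections.json shape these lookups imply: entries are keyed by connection id, and envVars maps logical keys to environment variable names. The ids and env var names below are hypothetical.

// Shape of .squadbase/connections.json (hypothetical ids and env var names)
const connections = {
  "my-bigquery": {
    connector: { slug: "bigquery" },        // connector type, read as entry.connector.slug
    envVars: {
      "project-id": "MY_BQ_PROJECT_ID",     // resolved via resolveEnvVar(entry, "project-id", id)
      "service-account-json-base64": "MY_BQ_SA_JSON_B64" // base64-encoded service account JSON
    }
  }
};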
@@ -68,12 +63,12 @@ function createBigQueryClient(entry, slug) {
  }
 
  // src/connector-client/snowflake.ts
- function createSnowflakeClient(entry, slug) {
- const accountIdentifier = resolveEnvVar(entry, "account", slug);
- const user = resolveEnvVar(entry, "user", slug);
- const role = resolveEnvVar(entry, "role", slug);
- const warehouse = resolveEnvVar(entry, "warehouse", slug);
- const privateKeyBase64 = resolveEnvVar(entry, "private-key-base64", slug);
+ function createSnowflakeClient(entry, connectionId) {
+ const accountIdentifier = resolveEnvVar(entry, "account", connectionId);
+ const user = resolveEnvVar(entry, "user", connectionId);
+ const role = resolveEnvVar(entry, "role", connectionId);
+ const warehouse = resolveEnvVar(entry, "warehouse", connectionId);
+ const privateKeyBase64 = resolveEnvVar(entry, "private-key-base64", connectionId);
  const privateKey = Buffer.from(privateKeyBase64, "base64").toString("utf-8");
  return {
  async query(sql) {
@@ -110,176 +105,6 @@ function createSnowflakeClient(entry, slug) {
  };
  }
 
- // src/connector-client/mysql.ts
- function createMySQLClient(entry, slug) {
- const connectionUrl = resolveEnvVar(entry, "connection-url", slug);
- let poolPromise = null;
- function getPool() {
- if (!poolPromise) {
- poolPromise = import("mysql2/promise").then(
- (mysql) => mysql.default.createPool(connectionUrl)
- );
- }
- return poolPromise;
- }
- return {
- async query(sql, params) {
- const pool = await getPool();
- const [rows] = await pool.execute(sql, params);
- return { rows };
- }
- };
- }
-
- // src/connector-client/aws-athena.ts
- function createAthenaClient(entry, slug) {
- const region = resolveEnvVar(entry, "aws-region", slug);
- const accessKeyId = resolveEnvVar(entry, "aws-access-key-id", slug);
- const secretAccessKey = resolveEnvVar(entry, "aws-secret-access-key", slug);
- const workgroup = resolveEnvVarOptional(entry, "workgroup") ?? "primary";
- const outputLocation = resolveEnvVarOptional(entry, "output-location");
- return {
- async query(sql) {
- const {
- AthenaClient,
- StartQueryExecutionCommand,
- GetQueryExecutionCommand,
- GetQueryResultsCommand
- } = await import("@aws-sdk/client-athena");
- const client = new AthenaClient({
- region,
- credentials: { accessKeyId, secretAccessKey }
- });
- const startParams = {
- QueryString: sql,
- WorkGroup: workgroup
- };
- if (outputLocation) {
- startParams.ResultConfiguration = { OutputLocation: outputLocation };
- }
- const { QueryExecutionId } = await client.send(
- new StartQueryExecutionCommand(startParams)
- );
- if (!QueryExecutionId) throw new Error("Athena: failed to start query execution");
- while (true) {
- const { QueryExecution } = await client.send(
- new GetQueryExecutionCommand({ QueryExecutionId })
- );
- const state = QueryExecution?.Status?.State;
- if (state === "SUCCEEDED") break;
- if (state === "FAILED") {
- throw new Error(
- `Athena query failed: ${QueryExecution?.Status?.StateChangeReason ?? "unknown"}`
- );
- }
- if (state === "CANCELLED") throw new Error("Athena query was cancelled");
- await new Promise((r) => setTimeout(r, 500));
- }
- const { ResultSet } = await client.send(
- new GetQueryResultsCommand({ QueryExecutionId })
- );
- const resultRows = ResultSet?.Rows ?? [];
- if (resultRows.length === 0) return { rows: [] };
- const headers = resultRows[0].Data?.map((d) => d.VarCharValue ?? "") ?? [];
- const rows = resultRows.slice(1).map((row) => {
- const obj = {};
- row.Data?.forEach((d, i) => {
- obj[headers[i]] = d.VarCharValue ?? null;
- });
- return obj;
- });
- return { rows };
- }
- };
- }
-
- // src/connector-client/redshift.ts
- function createRedshiftClient(entry, slug) {
- const region = resolveEnvVar(entry, "aws-region", slug);
- const accessKeyId = resolveEnvVar(entry, "aws-access-key-id", slug);
- const secretAccessKey = resolveEnvVar(entry, "aws-secret-access-key", slug);
- const database = resolveEnvVar(entry, "database", slug);
- const clusterIdentifier = resolveEnvVarOptional(entry, "cluster-identifier");
- const workgroupName = resolveEnvVarOptional(entry, "workgroup-name");
- const secretArn = resolveEnvVarOptional(entry, "secret-arn");
- const dbUser = resolveEnvVarOptional(entry, "db-user");
- return {
- async query(sql) {
- const {
- RedshiftDataClient,
- ExecuteStatementCommand,
- DescribeStatementCommand,
- GetStatementResultCommand
- } = await import("@aws-sdk/client-redshift-data");
- const client = new RedshiftDataClient({
- region,
- credentials: { accessKeyId, secretAccessKey }
- });
- const executeParams = {
- Sql: sql,
- Database: database
- };
- if (clusterIdentifier) executeParams.ClusterIdentifier = clusterIdentifier;
- if (workgroupName) executeParams.WorkgroupName = workgroupName;
- if (secretArn) executeParams.SecretArn = secretArn;
- if (dbUser) executeParams.DbUser = dbUser;
- const { Id } = await client.send(
- new ExecuteStatementCommand(executeParams)
- );
- if (!Id) throw new Error("Redshift: failed to start statement execution");
- while (true) {
- const desc = await client.send(new DescribeStatementCommand({ Id }));
- const status = desc.Status;
- if (status === "FINISHED") break;
- if (status === "FAILED") {
- throw new Error(`Redshift query failed: ${desc.Error ?? "unknown"}`);
- }
- if (status === "ABORTED") throw new Error("Redshift query was aborted");
- await new Promise((r) => setTimeout(r, 500));
- }
- const result = await client.send(new GetStatementResultCommand({ Id }));
- const columns = result.ColumnMetadata?.map((c) => c.name ?? "") ?? [];
- const rows = (result.Records ?? []).map((record) => {
- const obj = {};
- record.forEach((field, i) => {
- const col = columns[i];
- const value = field.stringValue ?? field.longValue ?? field.doubleValue ?? field.booleanValue ?? (field.isNull ? null : field.blobValue ?? null);
- obj[col] = value;
- });
- return obj;
- });
- return { rows };
- }
- };
- }
-
- // src/connector-client/databricks.ts
- function createDatabricksClient(entry, slug) {
- const host = resolveEnvVar(entry, "host", slug);
- const httpPath = resolveEnvVar(entry, "http-path", slug);
- const token = resolveEnvVar(entry, "token", slug);
- return {
- async query(sql) {
- const { DBSQLClient } = await import("@databricks/sql");
- const client = new DBSQLClient();
- await client.connect({ host, path: httpPath, token });
- try {
- const session = await client.openSession();
- try {
- const operation = await session.executeStatement(sql);
- const result = await operation.fetchAll();
- await operation.close();
- return { rows: result };
- } finally {
- await session.close();
- }
- } finally {
- await client.close();
- }
- }
- };
- }
-
  // src/connector-client/registry.ts
  function createConnectorRegistry() {
  let connectionsCache = null;
@@ -287,7 +112,7 @@ function createConnectorRegistry() {
  function getConnectionsFilePath() {
  return process.env.CONNECTIONS_PATH ?? path.join(process.cwd(), "../../.squadbase/connections.json");
  }
- function loadConnections2() {
+ function loadConnections() {
  if (connectionsCache !== null) return connectionsCache;
  const filePath = getConnectionsFilePath();
  try {
@@ -298,67 +123,38 @@ function createConnectorRegistry() {
  }
  return connectionsCache;
  }
- async function getClient2(connectorSlug, connectorType) {
- if (!connectorSlug) {
- const cacheKey = "__squadbase-db__";
- const cached2 = clientCache.get(cacheKey);
- if (cached2) return cached2;
- const url = process.env.SQUADBASE_POSTGRESQL_URL;
- if (!url) throw new Error("SQUADBASE_POSTGRESQL_URL environment variable is not set");
- const client = createPostgreSQLClient(url);
- clientCache.set(cacheKey, client);
- return client;
- }
- const cached = clientCache.get(connectorSlug);
- if (cached) return cached;
- const connections = loadConnections2();
- const entry = connections[connectorSlug];
+ async function getClient2(connectionId) {
+ const connections = loadConnections();
+ const entry = connections[connectionId];
  if (!entry) {
- throw new Error(`connector slug '${connectorSlug}' not found in .squadbase/connections.json`);
- }
- const resolvedType = connectorType ?? entry.connectorType;
- if (!resolvedType) {
- throw new Error(
- `connector type could not be determined for slug '${connectorSlug}'. Specify connectorType in the data-source JSON or in .squadbase/connections.json.`
- );
- }
- if (resolvedType === "snowflake") {
- return createSnowflakeClient(entry, connectorSlug);
- }
- if (resolvedType === "bigquery") {
- return createBigQueryClient(entry, connectorSlug);
+ throw new Error(`connection '${connectionId}' not found in .squadbase/connections.json`);
  }
- if (resolvedType === "athena") {
- return createAthenaClient(entry, connectorSlug);
+ const connectorSlug = entry.connector.slug;
+ const cached = clientCache.get(connectionId);
+ if (cached) return { client: cached, connectorSlug };
+ if (connectorSlug === "snowflake") {
+ return { client: createSnowflakeClient(entry, connectionId), connectorSlug };
  }
- if (resolvedType === "redshift") {
- return createRedshiftClient(entry, connectorSlug);
+ if (connectorSlug === "bigquery") {
+ return { client: createBigQueryClient(entry, connectionId), connectorSlug };
  }
- if (resolvedType === "databricks") {
- return createDatabricksClient(entry, connectorSlug);
- }
- if (resolvedType === "mysql") {
- const client = createMySQLClient(entry, connectorSlug);
- clientCache.set(connectorSlug, client);
- return client;
- }
- if (resolvedType === "postgresql" || resolvedType === "squadbase-db") {
+ if (connectorSlug === "postgresql" || connectorSlug === "squadbase-db") {
  const urlEnvName = entry.envVars["connection-url"];
  if (!urlEnvName) {
- throw new Error(`'connection-url' is not defined in envVars for connector '${connectorSlug}'`);
+ throw new Error(`'connection-url' is not defined in envVars for connection '${connectionId}'`);
  }
  const connectionUrl = process.env[urlEnvName];
  if (!connectionUrl) {
  throw new Error(
- `environment variable '${urlEnvName}' (mapped from connector '${connectorSlug}') is not set`
+ `environment variable '${urlEnvName}' (mapped from connection '${connectionId}') is not set`
  );
  }
  const client = createPostgreSQLClient(connectionUrl);
- clientCache.set(connectorSlug, client);
- return client;
+ clientCache.set(connectionId, client);
+ return { client, connectorSlug };
  }
  throw new Error(
- `connector type '${resolvedType}' is not supported as a SQL connector. Supported SQL types: "postgresql", "squadbase-db", "mysql", "snowflake", "bigquery", "athena", "redshift", "databricks". Non-SQL types (airtable, google-analytics, kintone, wix-store, dbt) should be used via TypeScript handlers.`
+ `connector type '${connectorSlug}' is not supported. Supported: "snowflake", "bigquery", "postgresql", "squadbase-db"`
  );
  }
  function reloadEnvFile2(envPath) {
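A minimal usage sketch of the reworked registry API (the connection id and SQL below are hypothetical): getClient is now keyed by connectionId, reads the connector slug from the entry itself, and returns the client together with that slug.

// Sketch: look up a client by connection id (id is hypothetical)
const { client, connectorSlug } = await getClient("my-postgres");
// connectorSlug comes from entry.connector.slug, e.g. "postgresql"
const { rows } = await client.query("SELECT 1");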
@@ -390,14 +186,15 @@ function createConnectorRegistry() {
  } catch {
  }
  }
- return { getClient: getClient2, loadConnections: loadConnections2, reloadEnvFile: reloadEnvFile2, watchConnectionsFile: watchConnectionsFile2 };
+ return { getClient: getClient2, reloadEnvFile: reloadEnvFile2, watchConnectionsFile: watchConnectionsFile2 };
  }
 
  // src/connector-client/index.ts
- var { getClient, loadConnections, reloadEnvFile, watchConnectionsFile } = createConnectorRegistry();
+ var { getClient, reloadEnvFile, watchConnectionsFile } = createConnectorRegistry();
 
  // src/registry.ts
  var dataSources = /* @__PURE__ */ new Map();
+ var currentDirPath = "";
  var viteServer = null;
  function validateHandlerPath(dirPath, handlerPath) {
  const absolute = path2.resolve(dirPath, handlerPath);
@@ -452,6 +249,7 @@ async function initialize() {
  );
  dataSources.clear();
  const dirPath = process.env.DATA_SOURCE_DIR || defaultDataSourceDir;
+ currentDirPath = dirPath;
  await mkdir(dirPath, { recursive: true });
  const files = await readdir(dirPath);
  const jsonFiles = files.filter((f) => f.endsWith(".json"));
@@ -464,6 +262,10 @@ async function initialize() {
  console.warn(`[registry] Skipping ${file}: missing description`);
  return;
  }
+ if (!def.connectionId) {
+ console.warn(`[registry] Skipping ${file}: missing connectionId`);
+ return;
+ }
  if (def.type === "typescript") {
  if (!def.handlerPath) {
  console.warn(`[registry] Skipping ${file}: missing handlerPath`);
@@ -474,6 +276,7 @@ async function initialize() {
  description: def.description,
  parameters: def.parameters ?? [],
  response: def.response,
+ connectionId: def.connectionId,
  cacheConfig: def.cache,
  handler: async () => {
  throw new Error("TypeScript handler must be called via _tsHandlerPath");
@@ -493,14 +296,15 @@ async function initialize() {
  description: sqlDef.description,
  parameters: sqlDef.parameters ?? [],
  response: sqlDef.response,
- connectorSlug: sqlDef.connectorSlug,
+ connectionId: sqlDef.connectionId,
  cacheConfig: sqlDef.cache,
+ _query: sqlDef.query,
  handler: async (runtimeParams) => {
- const client = await getClient(sqlDef.connectorSlug, sqlDef.connectorType);
- const isLiteralConnector = sqlDef.connectorType === "snowflake" || sqlDef.connectorType === "bigquery" || sqlDef.connectorType === "athena" || sqlDef.connectorType === "redshift" || sqlDef.connectorType === "databricks";
+ const { client, connectorSlug } = await getClient(sqlDef.connectionId);
+ const isExternalConnector = connectorSlug === "snowflake" || connectorSlug === "bigquery";
  let queryText;
  let queryValues;
- if (isLiteralConnector) {
+ if (isExternalConnector) {
  const defaults = new Map(
  (sqlDef.parameters ?? []).map((p) => [p.name, p.default ?? null])
  );
@@ -518,14 +322,6 @@ async function initialize() {
  }
  );
  queryValues = [];
- } else if (sqlDef.connectorType === "mysql") {
- const built = buildQuery(
- sqlDef.query,
- sqlDef.parameters ?? [],
- runtimeParams
- );
- queryText = built.text.replace(/\$(\d+)/g, "?");
- queryValues = built.values;
  } else {
  const built = buildQuery(
  sqlDef.query,
@@ -576,25 +372,28 @@ function startWatching() {
  function getDataSource(slug) {
  return dataSources.get(slug);
  }
- function getAllMeta() {
- return Array.from(dataSources.entries()).map(([slug, def]) => ({
+ function buildMeta(slug, def) {
+ return {
  slug,
  description: def.description,
+ type: def._isTypescript ? "typescript" : "sql",
  parameters: def.parameters,
  response: def.response,
- connectorSlug: def.connectorSlug
- }));
+ query: def._query,
+ connectionId: def.connectionId,
+ handlerPath: def._tsHandlerPath ? path2.relative(currentDirPath, def._tsHandlerPath) : void 0,
+ cache: def.cacheConfig
+ };
+ }
+ function getAllMeta() {
+ return Array.from(dataSources.entries()).map(
+ ([slug, def]) => buildMeta(slug, def)
+ );
  }
  function getMeta(slug) {
  const def = dataSources.get(slug);
  if (!def) return void 0;
- return {
- slug,
- description: def.description,
- parameters: def.parameters,
- response: def.response,
- connectorSlug: def.connectorSlug
- };
+ return buildMeta(slug, def);
  }
 
  // src/routes/data-source.ts
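Given buildMeta above, the meta returned for a SQL data source now looks roughly like this (all values hypothetical):

// Sketch of getMeta() output for a SQL-type data source (values hypothetical)
const meta = {
  slug: "daily-revenue",
  description: "Revenue per day",
  type: "sql",                 // "typescript" when def._isTypescript is set
  parameters: [],
  response: undefined,
  query: "SELECT day, total FROM revenue", // from def._query; absent for TS sources
  connectionId: "my-postgres",
  handlerPath: undefined,      // for TS sources: _tsHandlerPath relative to the data-source dir
  cache: { ttl: 300 }
};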
@@ -7,14 +7,14 @@ interface ParameterMeta {
  }
  interface DataSourceCacheConfig {
  /**
- * Cache TTL in seconds.
- * 0 or unset means no caching (default behavior for backward compatibility).
+ * キャッシュの有効期間(秒)。
+ * 0 または未指定の場合はキャッシュしない(後方互換性を保つデフォルト動作)。
  */
  ttl: number;
  /**
- * When true, stale data is returned immediately after TTL expiry
- * while fresh data is fetched asynchronously in the background to update the cache.
- * Default: false
+ * true の場合、TTL 期限切れ後も古いデータを即座に返しつつ、
+ * バックグラウンドで新しいデータを非同期取得してキャッシュを更新する。
+ * デフォルト: false
  */
  staleWhileRevalidate?: boolean;
  }
@@ -47,18 +47,23 @@ interface DataSourceDefinition {
  description: string;
  parameters: ParameterMeta[];
  response?: DataSourceResponse;
- connectorSlug?: string;
+ connectionId: string;
  cacheConfig?: DataSourceCacheConfig;
  handler: (params: Record<string, unknown>) => Promise<unknown> | unknown;
  _isTypescript?: boolean;
  _tsHandlerPath?: string;
+ _query?: string;
  }
  interface DataSourceMeta {
  slug: string;
  description: string;
+ type: "sql" | "typescript";
  parameters: ParameterMeta[];
  response?: DataSourceResponse;
- connectorSlug?: string;
+ query?: string;
+ connectionId: string;
+ handlerPath?: string;
+ cache?: DataSourceCacheConfig;
  }
  interface JsonDataSourceDefinition {
  description: string;
@@ -66,14 +71,14 @@ interface JsonDataSourceDefinition {
  parameters?: ParameterMeta[];
  response?: DataSourceResponse;
  query: string;
- connectorType?: string;
- connectorSlug?: string;
+ connectionId: string;
  cache?: DataSourceCacheConfig;
  }
  interface JsonTypeScriptDataSourceDefinition {
  description: string;
  type: "typescript";
  handlerPath: string;
+ connectionId: string;
  parameters?: ParameterMeta[];
  response?: DataSourceResponse;
  cache?: DataSourceCacheConfig;
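Under build-5 the two JSON definition shapes therefore look roughly like the following (descriptions, queries, ids, and paths hypothetical):

// Sketch of a SQL data-source JSON file (JsonDataSourceDefinition)
const sqlSource: JsonDataSourceDefinition = {
  description: "Revenue per day",
  query: "SELECT day, total FROM revenue",
  connectionId: "my-postgres", // required; replaces connectorType/connectorSlug
  cache: { ttl: 300, staleWhileRevalidate: true }
};

// Sketch of a TypeScript data-source JSON file (JsonTypeScriptDataSourceDefinition)
const tsSource: JsonTypeScriptDataSourceDefinition = {
  description: "Orders fetched by a custom handler",
  type: "typescript",
  handlerPath: "./handlers/orders.ts",
  connectionId: "my-postgres" // now required for TypeScript sources as well
};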