jl-db-comp 0.1.10__py3-none-any.whl → 0.1.11__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (18)
  1. jl_db_comp/_version.py +1 -1
  2. jl_db_comp/routes.py +352 -247
  3. {jl_db_comp-0.1.10.data → jl_db_comp-0.1.11.data}/data/share/jupyter/labextensions/jl_db_comp/package.json +2 -2
  4. {jl_db_comp-0.1.10.data → jl_db_comp-0.1.11.data}/data/share/jupyter/labextensions/jl_db_comp/schemas/jl_db_comp/package.json.orig +1 -1
  5. jl_db_comp-0.1.11.data/data/share/jupyter/labextensions/jl_db_comp/static/171.c84e54337cfd177ebe22.js +1 -0
  6. jl_db_comp-0.1.10.data/data/share/jupyter/labextensions/jl_db_comp/static/remoteEntry.e1b6bbc28eb414e4e599.js → jl_db_comp-0.1.11.data/data/share/jupyter/labextensions/jl_db_comp/static/remoteEntry.2f90ff333d0ac2cbb0a8.js +1 -1
  7. {jl_db_comp-0.1.10.dist-info → jl_db_comp-0.1.11.dist-info}/METADATA +1 -1
  8. jl_db_comp-0.1.11.dist-info/RECORD +20 -0
  9. jl_db_comp-0.1.10.data/data/share/jupyter/labextensions/jl_db_comp/static/171.545011db1d7843ce3ab3.js +0 -1
  10. jl_db_comp-0.1.10.dist-info/RECORD +0 -20
  11. {jl_db_comp-0.1.10.data → jl_db_comp-0.1.11.data}/data/etc/jupyter/jupyter_server_config.d/jl_db_comp.json +0 -0
  12. {jl_db_comp-0.1.10.data → jl_db_comp-0.1.11.data}/data/share/jupyter/labextensions/jl_db_comp/install.json +0 -0
  13. {jl_db_comp-0.1.10.data → jl_db_comp-0.1.11.data}/data/share/jupyter/labextensions/jl_db_comp/schemas/jl_db_comp/plugin.json +0 -0
  14. {jl_db_comp-0.1.10.data → jl_db_comp-0.1.11.data}/data/share/jupyter/labextensions/jl_db_comp/static/728.6552504d5b9b27551bc5.js +0 -0
  15. {jl_db_comp-0.1.10.data → jl_db_comp-0.1.11.data}/data/share/jupyter/labextensions/jl_db_comp/static/style.js +0 -0
  16. {jl_db_comp-0.1.10.data → jl_db_comp-0.1.11.data}/data/share/jupyter/labextensions/jl_db_comp/static/third-party-licenses.json +0 -0
  17. {jl_db_comp-0.1.10.dist-info → jl_db_comp-0.1.11.dist-info}/WHEEL +0 -0
  18. {jl_db_comp-0.1.10.dist-info → jl_db_comp-0.1.11.dist-info}/licenses/LICENSE +0 -0
jl_db_comp/_version.py CHANGED
@@ -1,4 +1,4 @@
  # This file is auto-generated by Hatchling. As such, do not:
  # - modify
  # - track in version control e.g. be sure to add to .gitignore
- __version__ = VERSION = '0.1.10'
+ __version__ = VERSION = '0.1.11'
jl_db_comp/routes.py CHANGED
@@ -1,4 +1,8 @@
+ import hashlib
  import json
+ import threading
+ import time
+ from contextlib import contextmanager
  from urllib.parse import unquote

  from jupyter_server.base.handlers import APIHandler
@@ -13,11 +17,133 @@ from .connections import (

  try:
      import psycopg2
+     import psycopg2.pool
+     from psycopg2 import sql as pgsql
      PSYCOPG2_AVAILABLE = True
  except ImportError:
      PSYCOPG2_AVAILABLE = False


+ # ---------------------------------------------------------------------------
+ # Connection pool
+ # ---------------------------------------------------------------------------
+
+ _pools: dict = {}
+ _pool_lock = threading.Lock()
+
+
+ @contextmanager
+ def _pooled_connection(db_url: str):
+     """Borrow a connection from a per-URL pool, return it on exit.
+
+     Creates the pool lazily on first use (minconn=1, maxconn=5).
+     Sets ``autocommit=True`` since all queries are read-only.
+     On ``OperationalError`` the connection is discarded instead of returned.
+     """
+     conn = None
+     discard = False
+     try:
+         with _pool_lock:
+             if db_url not in _pools:
+                 _pools[db_url] = psycopg2.pool.ThreadedConnectionPool(
+                     minconn=1, maxconn=5, dsn=db_url,
+                 )
+             conn = _pools[db_url].getconn()
+         conn.autocommit = True
+         yield conn
+     except psycopg2.OperationalError:
+         discard = True
+         raise
+     finally:
+         if conn is not None:
+             try:
+                 with _pool_lock:
+                     pool = _pools.get(db_url)
+                     if pool:
+                         pool.putconn(conn, close=discard)
+             except Exception:
+                 pass
+
+
+ # ---------------------------------------------------------------------------
+ # Metadata cache (TTL = 120 s)
+ # ---------------------------------------------------------------------------
+
+ class _MetadataCache:
+     """Thread-safe TTL cache for database metadata."""
+
+     def __init__(self, ttl_seconds: float = 120.0):
+         self._ttl = ttl_seconds
+         self._data: dict = {}
+         self._lock = threading.Lock()
+
+     def get(self, key: str):
+         """Return cached value, or ``None`` if missing / expired."""
+         with self._lock:
+             entry = self._data.get(key)
+             if entry is None:
+                 return None
+             value, expiry = entry
+             if time.monotonic() > expiry:
+                 del self._data[key]
+                 return None
+             return value
+
+     def put(self, key: str, value):
+         """Store *value* with TTL starting now."""
+         with self._lock:
+             self._data[key] = (value, time.monotonic() + self._ttl)
+
+     def clear(self):
+         """Drop every entry."""
+         with self._lock:
+             self._data.clear()
+
+
+ _cache = _MetadataCache(ttl_seconds=120)
+
+
+ # ---------------------------------------------------------------------------
+ # Helpers
+ # ---------------------------------------------------------------------------
+
+ def _url_id(db_url: str) -> str:
+     """Short hash of a connection URL for use in cache keys."""
+     return hashlib.sha256(db_url.encode()).hexdigest()[:12]
+
+
+ def _filter_by_prefix(items: list, prefix: str) -> list:
+     """Return items whose ``name`` starts with *prefix* (case-insensitive)."""
+     if not prefix:
+         return items
+     lp = prefix.lower()
+     return [item for item in items if item["name"].lower().startswith(lp)]
+
+
+ def _jsonb_path_expr(column: str, path: list = None):
+     """Build a safe ``psycopg2.sql.Composable`` for a JSONB path.
+
+     ``_jsonb_path_expr("meta", ["a", "b"])`` produces
+     ``"meta"->'a'->'b'``.
+     """
+     expr = pgsql.Identifier(column)
+     for key in (path or []):
+         expr = pgsql.SQL("{0}->{1}").format(expr, pgsql.Literal(key))
+     return expr
+
+
+ def _jsonb_path_display(column: str, path: list = None) -> str:
+     """Human-readable version of the JSONB path (for diagnostics JSON)."""
+     result = column
+     for key in (path or []):
+         result = f"{result}->'{key}'"
+     return result
+
+
+ # ---------------------------------------------------------------------------
+ # Completions handler
+ # ---------------------------------------------------------------------------
+
  class PostgresCompletionsHandler(APIHandler):
      """Handler for fetching PostgreSQL table and column completions."""

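As an illustration (not part of the package), the new _jsonb_path_expr helper leans on psycopg2's sql module so that identifiers and literals are quoted by the driver rather than interpolated into f-strings. A minimal sketch of the same composition, assuming psycopg2 is installed; the DSN is made up, and as_string() needs a live connection as its quoting context:

    import psycopg2
    from psycopg2 import sql as pgsql

    # Same composition technique as _jsonb_path_expr("meta", ["a", "b"]).
    expr = pgsql.Identifier("meta")
    for key in ["a", "b"]:
        expr = pgsql.SQL("{0}->{1}").format(expr, pgsql.Literal(key))

    conn = psycopg2.connect("postgresql://user:secret@localhost:5432/demo")  # hypothetical DSN
    print(expr.as_string(conn))  # prints: "meta"->'a'->'b'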
@@ -30,8 +156,11 @@ class PostgresCompletionsHandler(APIHandler):
          - db_url: URL-encoded PostgreSQL connection string (fallback)
          - prefix: Optional prefix to filter results
          - schema: Database schema (default: 'public')
-         - table: Optional table name to filter columns (only returns columns from this table)
-         - schema_or_table: Ambiguous identifier - backend determines if it's a schema or table
+         - table: Optional table name to filter columns
+         - schema_or_table: Ambiguous identifier
+         - jsonb_column: JSONB column for key extraction
+         - jsonb_path: JSON-encoded path array for nested JSONB
+         - connections_file: Custom path to connections.ini
          """
          if not PSYCOPG2_AVAILABLE:
              self.set_status(500)
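A completions request against this handler might look like the following; the connection name, table, and prefix are invented, and the path is served under the Jupyter server's base URL (the frontend bundle joins it with URLExt.join(baseUrl, "jl-db-comp", ...)):

    GET /jl-db-comp/completions?connection=mydb&schema=public&table=users&prefix=em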
@@ -112,6 +241,8 @@ class PostgresCompletionsHandler(APIHandler):
              "columns": []
          }))

+     # -- core logic ---------------------------------------------------------
+
      def _fetch_completions(
          self,
          db_url: str,
@@ -120,144 +251,127 @@ class PostgresCompletionsHandler(APIHandler):
          table: str = None,
          schema_or_table: str = None,
          jsonb_column: str = None,
-         jsonb_path: list = None
+         jsonb_path: list = None,
      ) -> dict:
          """Fetch table and column names from PostgreSQL.

-         Args:
-             db_url: PostgreSQL connection string
-             schema: Database schema name
-             prefix: Filter prefix (case-insensitive)
-             table: Optional table name to filter columns (only returns columns from this table)
-             schema_or_table: Ambiguous identifier - determine if it's a schema or table
-             jsonb_column: Optional JSONB column to extract keys from
-             jsonb_path: Optional path for nested JSONB key extraction
-
-         Returns:
-             Dictionary with tables, columns, and jsonbKeys arrays
+         Results are cached server-side (120 s TTL). Prefix filtering is
+         done in Python after the cache lookup — SQL queries fetch full
+         result sets so that subsequent keystrokes hit the cache.
          """
-         conn = None
-         try:
-             conn = psycopg2.connect(db_url)
-             cursor = conn.cursor()
+         uid = _url_id(db_url)

-             tables = []
-             columns = []
-             jsonb_keys = []
+         # --- JSONB key extraction ---
+         if jsonb_column:
+             cache_key = (
+                 f"jsonb:{uid}:{schema}:{schema_or_table or ''}:"
+                 f"{jsonb_column}:{json.dumps(jsonb_path or [])}"
+             )
+             all_keys = _cache.get(cache_key)
+             if all_keys is None:
+                 with _pooled_connection(db_url) as conn:
+                     cur = conn.cursor()
+                     all_keys = self._fetch_jsonb_keys(
+                         cur, schema, schema_or_table, jsonb_column, jsonb_path,
+                     )
+                     cur.close()
+                 _cache.put(cache_key, all_keys)
+             return {
+                 "status": "success",
+                 "tables": [],
+                 "columns": [],
+                 "jsonbKeys": _filter_by_prefix(all_keys, prefix),
+             }

-             # Handle JSONB key extraction
-             if jsonb_column:
-                 jsonb_keys = self._fetch_jsonb_keys(
-                     cursor, schema, schema_or_table, jsonb_column, jsonb_path, prefix
+         # --- schema_or_table disambiguation ---
+         if schema_or_table:
+             schema_ck = f"is_schema:{uid}:{schema_or_table.lower()}"
+             is_schema = _cache.get(schema_ck)
+             if is_schema is None:
+                 with _pooled_connection(db_url) as conn:
+                     cur = conn.cursor()
+                     cur.execute(
+                         "SELECT 1 FROM information_schema.schemata "
+                         "WHERE LOWER(schema_name) = %s",
+                         (schema_or_table.lower(),),
+                     )
+                     is_schema = cur.fetchone() is not None
+                     cur.close()
+                 _cache.put(schema_ck, is_schema)
+
+             if is_schema:
+                 return self._tables_in_schema(uid, db_url, schema_or_table, prefix)
+             return self._columns_of_table(uid, db_url, schema, schema_or_table, prefix)
+
+         # --- explicit table → columns ---
+         if table:
+             return self._columns_of_table(uid, db_url, schema, table, prefix)
+
+         # --- default: list tables in schema ---
+         return self._tables_in_schema(uid, db_url, schema, prefix)
+
+     # -- helpers ------------------------------------------------------------
+
+     def _tables_in_schema(self, uid, db_url, schema, prefix):
+         """Return tables/views in *schema*, filtered by *prefix*."""
+         cache_key = f"tables:{uid}:{schema}"
+         all_tables = _cache.get(cache_key)
+         if all_tables is None:
+             with _pooled_connection(db_url) as conn:
+                 cur = conn.cursor()
+                 cur.execute(
+                     "SELECT table_name, table_type "
+                     "FROM information_schema.tables "
+                     "WHERE table_schema = %s "
+                     " AND table_type IN ('BASE TABLE', 'VIEW') "
+                     "ORDER BY table_name",
+                     (schema,),
                  )
-                 cursor.close()
-                 return {
-                     "status": "success",
-                     "tables": [],
-                     "columns": [],
-                     "jsonbKeys": jsonb_keys
-                 }
-
-             # Handle schema_or_table: check if it's a schema first, then try as table
-             if schema_or_table:
-                 # First, check if it's a schema
-                 cursor.execute("""
-                     SELECT schema_name
-                     FROM information_schema.schemata
-                     WHERE LOWER(schema_name) = %s
-                 """, (schema_or_table.lower(),))
-
-                 is_schema = cursor.fetchone() is not None
-
-                 if is_schema:
-                     # It's a schema - fetch tables and views from that schema
-                     cursor.execute("""
-                         SELECT table_name, table_type
-                         FROM information_schema.tables
-                         WHERE table_schema = %s
-                         AND table_type IN ('BASE TABLE', 'VIEW')
-                         AND LOWER(table_name) LIKE %s
-                         ORDER BY table_name
-                     """, (schema_or_table, f"{prefix}%"))
-
-                     tables = [
-                         {
-                             "name": row[0],
-                             "type": "view" if row[1] == 'VIEW' else "table"
-                         }
-                         for row in cursor.fetchall()
-                     ]
-                 else:
-                     # Not a schema - treat as table name, fetch columns from default schema
-                     cursor.execute("""
-                         SELECT table_name, column_name, data_type
-                         FROM information_schema.columns
-                         WHERE table_schema = %s
-                         AND LOWER(table_name) = %s
-                         AND LOWER(column_name) LIKE %s
-                         ORDER BY ordinal_position
-                     """, (schema, schema_or_table.lower(), f"{prefix}%"))
-
-                     columns = [
-                         {
-                             "name": row[1],
-                             "table": row[0],
-                             "dataType": row[2],
-                             "type": "column"
-                         }
-                         for row in cursor.fetchall()
-                     ]
-
-             # If table is specified with explicit schema, fetch columns from that table
-             elif table:
-                 cursor.execute("""
-                     SELECT table_name, column_name, data_type
-                     FROM information_schema.columns
-                     WHERE table_schema = %s
-                     AND LOWER(table_name) = %s
-                     AND LOWER(column_name) LIKE %s
-                     ORDER BY ordinal_position
-                 """, (schema, table.lower(), f"{prefix}%"))
-
-                 columns = [
-                     {
-                         "name": row[1],
-                         "table": row[0],
-                         "dataType": row[2],
-                         "type": "column"
-                     }
-                     for row in cursor.fetchall()
+             all_tables = [
+                 {"name": r[0], "type": "view" if r[1] == "VIEW" else "table"}
+                 for r in cur.fetchall()
              ]
-             else:
-                 # No table or schema_or_table specified - fetch tables and views from default schema
-                 cursor.execute("""
-                     SELECT table_name, table_type
-                     FROM information_schema.tables
-                     WHERE table_schema = %s
-                     AND table_type IN ('BASE TABLE', 'VIEW')
-                     AND LOWER(table_name) LIKE %s
-                     ORDER BY table_name
-                 """, (schema, f"{prefix}%"))
-
-                 tables = [
+                 cur.close()
+             _cache.put(cache_key, all_tables)
+         return {
+             "status": "success",
+             "tables": _filter_by_prefix(all_tables, prefix),
+             "columns": [],
+         }
+
+     def _columns_of_table(self, uid, db_url, schema, table, prefix):
+         """Return columns of *table*, filtered by *prefix*."""
+         cache_key = f"columns:{uid}:{schema}:{table.lower()}"
+         all_cols = _cache.get(cache_key)
+         if all_cols is None:
+             with _pooled_connection(db_url) as conn:
+                 cur = conn.cursor()
+                 cur.execute(
+                     "SELECT table_name, column_name, data_type "
+                     "FROM information_schema.columns "
+                     "WHERE table_schema = %s "
+                     " AND LOWER(table_name) = %s "
+                     "ORDER BY ordinal_position",
+                     (schema, table.lower()),
+                 )
+                 all_cols = [
                      {
-                         "name": row[0],
-                         "type": "view" if row[1] == 'VIEW' else "table"
+                         "name": r[1],
+                         "table": r[0],
+                         "dataType": r[2],
+                         "type": "column",
                      }
-                     for row in cursor.fetchall()
+                     for r in cur.fetchall()
                  ]
+                 cur.close()
+             _cache.put(cache_key, all_cols)
+         return {
+             "status": "success",
+             "tables": [],
+             "columns": _filter_by_prefix(all_cols, prefix),
+         }

-             cursor.close()
-
-             return {
-                 "status": "success",
-                 "tables": tables,
-                 "columns": columns
-             }
-
-         finally:
-             if conn:
-                 conn.close()
+     # -- JSONB key extraction -----------------------------------------------

      def _fetch_jsonb_keys(
          self,
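A self-contained sketch of the cache-key scheme used above, with an invented DSN; uid mirrors _url_id(), and the key formats match the tables:/columns: keys built by the helper methods:

    import hashlib

    db_url = "postgresql://user:secret@localhost:5432/shop"  # hypothetical DSN
    uid = hashlib.sha256(db_url.encode()).hexdigest()[:12]   # same scheme as _url_id()

    print(f"tables:{uid}:public")         # key for the table list of schema 'public'
    print(f"columns:{uid}:public:users")  # key for the columns of table 'users'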
@@ -266,79 +380,65 @@ class PostgresCompletionsHandler(APIHandler):
          table_name: str,
          jsonb_column: str,
          jsonb_path: list = None,
-         prefix: str = ''
      ) -> list:
-         """Extract unique JSONB keys from a column in a table.
+         """Extract unique JSONB keys from a column.

-         Args:
-             cursor: Database cursor
-             schema: Database schema
-             table_name: Table containing the JSONB column (can be None)
-             jsonb_column: Name of the JSONB column
-             jsonb_path: Optional path for nested keys (e.g., ['user', 'profile'])
-             prefix: Filter prefix for keys
-
-         Returns:
-             List of JSONB key completion items
+         Returns the full (unfiltered) list — the caller applies prefix
+         filtering. Uses bounded sub-queries so that at most 100 rows
+         are scanned for diagnostics and 1 000 for key extraction.
          """
          try:
-             # If no table specified, find tables with this JSONB column
+             # If no table specified, find the first table with this JSONB column
              if not table_name:
-                 cursor.execute("""
-                     SELECT table_name
-                     FROM information_schema.columns
-                     WHERE table_schema = %s
-                     AND LOWER(column_name) = %s
-                     AND data_type = 'jsonb'
-                     LIMIT 1
-                 """, (schema, jsonb_column.lower()))
-
+                 cursor.execute(
+                     "SELECT table_name "
+                     "FROM information_schema.columns "
+                     "WHERE table_schema = %s "
+                     " AND LOWER(column_name) = %s "
+                     " AND data_type = 'jsonb' "
+                     "LIMIT 1",
+                     (schema, jsonb_column.lower()),
+                 )
                  result = cursor.fetchone()
                  if not result:
                      self.log.warning(
-                         f"JSONB completion: No JSONB column '{jsonb_column}' found "
-                         f"in schema '{schema}'. Verify the column exists and has "
-                         f"data_type='jsonb'."
+                         f"JSONB completion: No JSONB column '{jsonb_column}' "
+                         f"found in schema '{schema}'. Verify the column exists "
+                         f"and has data_type='jsonb'."
                      )
                      return []
-
                  table_name = result[0]
                  self.log.info(
                      f"JSONB completion: Found column '{jsonb_column}' in "
                      f"table '{schema}.{table_name}'"
                  )

-             # Build the JSONB path expression
-             if jsonb_path and len(jsonb_path) > 0:
-                 # For nested paths: column->>'key1'->>'key2'
-                 path_expr = jsonb_column
-                 for key in jsonb_path:
-                     path_expr = f"{path_expr}->'{key}'"
-             else:
-                 # For top-level keys: just the column
-                 path_expr = jsonb_column
-
-             # First, check the data distribution at this path for diagnostics
-             diag_query = f"""
-                 SELECT
-                     COUNT(*) as total_rows,
-                     COUNT({path_expr}) as non_null_count,
-                     COUNT(CASE WHEN jsonb_typeof({path_expr}) = 'object' THEN 1 END) as object_count,
-                     COUNT(CASE WHEN jsonb_typeof({path_expr}) = 'array' THEN 1 END) as array_count,
-                     COUNT(CASE WHEN jsonb_typeof({path_expr}) IN ('string', 'number', 'boolean') THEN 1 END) as scalar_count
-                 FROM {schema}.{table_name}
-                 LIMIT 1000
-             """
-             cursor.execute(diag_query)
+             path = _jsonb_path_expr(jsonb_column, jsonb_path)
+             sch = pgsql.Identifier(schema)
+             tbl = pgsql.Identifier(table_name)
+
+             # Diagnostic sample (100 rows) — bounded via sub-query
+             cursor.execute(
+                 pgsql.SQL(
+                     "SELECT "
+                     " COUNT(*), "
+                     " COUNT(jval), "
+                     " COUNT(CASE WHEN jsonb_typeof(jval) = 'object' THEN 1 END), "
+                     " COUNT(CASE WHEN jsonb_typeof(jval) = 'array' THEN 1 END), "
+                     " COUNT(CASE WHEN jsonb_typeof(jval) "
+                     " IN ('string','number','boolean') THEN 1 END) "
+                     "FROM (SELECT {0} AS jval FROM {1}.{2} LIMIT 100) sub"
+                 ).format(path, sch, tbl)
+             )
              diag = cursor.fetchone()
-
-             total_rows, non_null, obj_count, arr_count, scalar_count = diag
+             _, non_null, obj_count, arr_count, scalar_count = diag

              if non_null == 0:
                  self.log.warning(
                      f"JSONB completion: Column '{jsonb_column}' in "
                      f"'{schema}.{table_name}' has no non-NULL values at "
-                     f"path '{path_expr}'. Keys cannot be extracted from NULL data."
+                     f"path '{_jsonb_path_display(jsonb_column, jsonb_path)}'. "
+                     f"Keys cannot be extracted from NULL data."
                  )
                  return []

@@ -349,57 +449,61 @@ class PostgresCompletionsHandler(APIHandler):
              if scalar_count > 0:
                  type_info.append(f"{scalar_count} scalars")
              self.log.warning(
-                 f"JSONB completion: Path '{path_expr}' in "
+                 f"JSONB completion: Path "
+                 f"'{_jsonb_path_display(jsonb_column, jsonb_path)}' in "
                  f"'{schema}.{table_name}' contains no JSON objects "
                  f"(found: {', '.join(type_info) if type_info else 'only NULL'}). "
                  f"Keys can only be extracted from object types."
              )
              return []

-             # Query to extract unique keys
-             # LIMIT to 1000 rows for performance (sample the table)
-             query = f"""
-                 SELECT DISTINCT jsonb_object_keys({path_expr})
-                 FROM {schema}.{table_name}
-                 WHERE {path_expr} IS NOT NULL
-                 AND jsonb_typeof({path_expr}) = 'object'
-                 LIMIT 1000
-             """
-
-             cursor.execute(query)
+             # Key extraction scans at most 1 000 qualifying rows
+             cursor.execute(
+                 pgsql.SQL(
+                     "SELECT DISTINCT jsonb_object_keys(jval) "
+                     "FROM ("
+                     " SELECT {0} AS jval FROM {1}.{2} "
+                     " WHERE {0} IS NOT NULL "
+                     " AND jsonb_typeof({0}) = 'object' "
+                     " LIMIT 1000"
+                     ") sub"
+                 ).format(path, sch, tbl)
+             )
              keys = cursor.fetchall()

              if len(keys) == 0:
                  self.log.warning(
-                     f"JSONB completion: No keys found at path '{path_expr}' in "
+                     f"JSONB completion: No keys found at path "
+                     f"'{_jsonb_path_display(jsonb_column, jsonb_path)}' in "
                      f"'{schema}.{table_name}' despite {obj_count} objects. "
                      f"Objects may be empty {{}}."
                  )
                  return []

-             # Filter by prefix and format results
-             result = []
-             for row in keys:
-                 key = row[0]
-                 if key.lower().startswith(prefix):
-                     result.append({
-                         "name": key,
-                         "type": "jsonb_key",
-                         "keyPath": (jsonb_path or []) + [key]
-                     })
-
              self.log.info(
-                 f"JSONB completion: Found {len(keys)} unique keys at '{path_expr}' "
-                 f"in '{schema}.{table_name}' (sampled {obj_count} objects)"
+                 f"JSONB completion: Found {len(keys)} unique keys at "
+                 f"'{_jsonb_path_display(jsonb_column, jsonb_path)}' in "
+                 f"'{schema}.{table_name}' (sampled {obj_count} objects)"
              )

-             return result
+             return [
+                 {
+                     "name": r[0],
+                     "type": "jsonb_key",
+                     "keyPath": (jsonb_path or []) + [r[0]],
+                 }
+                 for r in keys
+             ]

          except psycopg2.Error as e:
              self.log.error(f"JSONB key extraction error: {str(e).split(chr(10))[0]}")
              return []


+ # ---------------------------------------------------------------------------
+ # JSONB diagnostics handler
+ # ---------------------------------------------------------------------------
+
  class JsonbDiagnosticsHandler(APIHandler):
      """Handler for diagnosing JSONB column issues."""

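Run standalone, the sampling pattern above behaves roughly as in this sketch; the table public.events and column meta are invented, and a reachable PostgreSQL instance is assumed:

    import psycopg2

    conn = psycopg2.connect("postgresql://user:secret@localhost:5432/demo")  # hypothetical DSN
    cur = conn.cursor()
    # Bounded sub-query: jsonb_object_keys() only sees up to 1000 object rows.
    cur.execute(
        "SELECT DISTINCT jsonb_object_keys(jval) "
        "FROM (SELECT meta AS jval FROM public.events "
        "      WHERE meta IS NOT NULL AND jsonb_typeof(meta) = 'object' "
        "      LIMIT 1000) sub"
    )
    print([row[0] for row in cur.fetchall()])  # e.g. ['price', 'sku', 'tags']
    cur.close()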
@@ -488,9 +592,7 @@ class JsonbDiagnosticsHandler(APIHandler):
          jsonb_path: list = None
      ) -> dict:
          """Get diagnostic information about JSONB columns."""
-         conn = None
-         try:
-             conn = psycopg2.connect(db_url)
+         with _pooled_connection(db_url) as conn:
              cursor = conn.cursor()

              result = {
@@ -502,12 +604,12 @@ class JsonbDiagnosticsHandler(APIHandler):
              }

              # Find all JSONB columns in the schema
-             query = """
-                 SELECT table_name, column_name
-                 FROM information_schema.columns
-                 WHERE table_schema = %s
-                 AND data_type = 'jsonb'
-             """
+             query = (
+                 "SELECT table_name, column_name "
+                 "FROM information_schema.columns "
+                 "WHERE table_schema = %s "
+                 " AND data_type = 'jsonb'"
+             )
              if table:
                  query += " AND LOWER(table_name) = %s"
                  query_params.append(table.lower())
@@ -530,34 +632,31 @@ class JsonbDiagnosticsHandler(APIHandler):
              actual_table = jsonb_columns[0][0]
              actual_column = jsonb_columns[0][1]

-             # Build path expression
-             if jsonb_path and len(jsonb_path) > 0:
-                 path_expr = actual_column
-                 for key in jsonb_path:
-                     path_expr = f"{path_expr}->'{key}'"
-             else:
-                 path_expr = actual_column
+             path_expr = _jsonb_path_expr(actual_column, jsonb_path)
+             sch_id = pgsql.Identifier(schema)
+             tbl_id = pgsql.Identifier(actual_table)

              # Get type distribution
-             diag_query = f"""
-                 SELECT
-                     COUNT(*) as total_rows,
-                     COUNT({path_expr}) as non_null_count,
-                     COUNT(CASE WHEN jsonb_typeof({path_expr}) = 'object' THEN 1 END) as object_count,
-                     COUNT(CASE WHEN jsonb_typeof({path_expr}) = 'array' THEN 1 END) as array_count,
-                     COUNT(CASE WHEN jsonb_typeof({path_expr}) = 'string' THEN 1 END) as string_count,
-                     COUNT(CASE WHEN jsonb_typeof({path_expr}) = 'number' THEN 1 END) as number_count,
-                     COUNT(CASE WHEN jsonb_typeof({path_expr}) = 'boolean' THEN 1 END) as boolean_count,
-                     COUNT(CASE WHEN jsonb_typeof({path_expr}) = 'null' THEN 1 END) as json_null_count
-                 FROM {schema}.{actual_table}
-             """
+             diag_query = pgsql.SQL(
+                 "SELECT "
+                 " COUNT(*) AS total_rows, "
+                 " COUNT({0}) AS non_null_count, "
+                 " COUNT(CASE WHEN jsonb_typeof({0}) = 'object' THEN 1 END) AS object_count, "
+                 " COUNT(CASE WHEN jsonb_typeof({0}) = 'array' THEN 1 END) AS array_count, "
+                 " COUNT(CASE WHEN jsonb_typeof({0}) = 'string' THEN 1 END) AS string_count, "
+                 " COUNT(CASE WHEN jsonb_typeof({0}) = 'number' THEN 1 END) AS number_count, "
+                 " COUNT(CASE WHEN jsonb_typeof({0}) = 'boolean' THEN 1 END) AS boolean_count, "
+                 " COUNT(CASE WHEN jsonb_typeof({0}) = 'null' THEN 1 END) AS json_null_count "
+                 "FROM {1}.{2}"
+             ).format(path_expr, sch_id, tbl_id)
+
              cursor.execute(diag_query)
              diag = cursor.fetchone()

              result["columnDiagnostics"] = {
                  "table": actual_table,
                  "column": actual_column,
-                 "pathExpression": path_expr,
+                 "pathExpression": _jsonb_path_display(actual_column, jsonb_path),
                  "totalRows": diag[0],
                  "nonNullCount": diag[1],
                  "typeDistribution": {
@@ -575,26 +674,24 @@ class JsonbDiagnosticsHandler(APIHandler):
              # If there are objects, get sample keys
              if diag[2] > 0:
                  try:
-                     key_query = f"""
-                         SELECT DISTINCT jsonb_object_keys({path_expr})
-                         FROM {schema}.{actual_table}
-                         WHERE {path_expr} IS NOT NULL
-                         AND jsonb_typeof({path_expr}) = 'object'
-                         LIMIT 20
-                     """
+                     key_query = pgsql.SQL(
+                         "SELECT DISTINCT jsonb_object_keys(jval) "
+                         "FROM ("
+                         " SELECT {0} AS jval FROM {1}.{2} "
+                         " WHERE {0} IS NOT NULL "
+                         " AND jsonb_typeof({0}) = 'object' "
+                         " LIMIT 1000"
+                         ") sub"
+                     ).format(path_expr, sch_id, tbl_id)
                      cursor.execute(key_query)
                      keys = [row[0] for row in cursor.fetchall()]
-                     result["columnDiagnostics"]["sampleKeys"] = keys
+                     result["columnDiagnostics"]["sampleKeys"] = keys[:20]
                  except psycopg2.Error:
                      result["columnDiagnostics"]["sampleKeys"] = []

              cursor.close()
              return result

-         finally:
-             if conn:
-                 conn.close()
-
      def _get_recommendation(self, diag) -> str:
          """Generate a recommendation based on diagnostic data."""
          total, non_null, obj, arr, string, number, boolean, json_null = diag
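Pieced together from the hunks above, the columnDiagnostics payload has roughly the following shape; all values are invented, and the typeDistribution sub-keys are cut off by the hunk boundary, so they are inferred from the count columns in the diagnostic query:

    {
        "table": "events",                              # hypothetical
        "column": "meta",
        "pathExpression": "meta->'a'",
        "totalRows": 1000,
        "nonNullCount": 950,
        "typeDistribution": {"object": 900, "array": 30},  # plus string/number/boolean/json null counts
        "sampleKeys": ["price", "sku"],                 # capped at 20 entries
    }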
@@ -628,6 +725,10 @@ class JsonbDiagnosticsHandler(APIHandler):
          return f"JSONB autocompletion should work. Found {obj} objects with extractable keys."


+ # ---------------------------------------------------------------------------
+ # Connections handler
+ # ---------------------------------------------------------------------------
+
  class ConnectionsHandler(APIHandler):
      """Handler for listing available database connections."""

@@ -660,6 +761,10 @@ class ConnectionsHandler(APIHandler):
          }))


+ # ---------------------------------------------------------------------------
+ # Route registration
+ # ---------------------------------------------------------------------------
+
  def setup_route_handlers(web_app):
      """Register route handlers with the Jupyter server."""
      host_pattern = ".*$"
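The handler list itself falls outside this hunk. Registration in jupyter_server conventionally looks like the sketch below; the endpoint names completions and connections are taken from the frontend bundle, while the exact pairing is an assumption (JsonbDiagnosticsHandler would be registered the same way):

    from jupyter_server.utils import url_path_join

    def setup_route_handlers(web_app):
        """Register route handlers with the Jupyter server (sketch)."""
        host_pattern = ".*$"
        base_url = web_app.settings["base_url"]
        handlers = [
            # Hypothetical pairing inferred from the frontend's fetch paths.
            (url_path_join(base_url, "jl-db-comp", "completions"), PostgresCompletionsHandler),
            (url_path_join(base_url, "jl-db-comp", "connections"), ConnectionsHandler),
        ]
        web_app.add_handlers(host_pattern, handlers)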
{jl_db_comp-0.1.10.data → jl_db_comp-0.1.11.data}/data/share/jupyter/labextensions/jl_db_comp/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "jl_db_comp",
-   "version": "0.1.10",
+   "version": "0.1.11",
    "description": "A JupyterLab extension to complete db queries in jupyterlab notebooks",
    "keywords": [
      "jupyter",
@@ -118,7 +118,7 @@
      "outputDir": "jl_db_comp/labextension",
      "schemaDir": "schema",
      "_build": {
-       "load": "static/remoteEntry.e1b6bbc28eb414e4e599.js",
+       "load": "static/remoteEntry.2f90ff333d0ac2cbb0a8.js",
        "extension": "./extension",
        "style": "./style"
      }
{jl_db_comp-0.1.10.data → jl_db_comp-0.1.11.data}/data/share/jupyter/labextensions/jl_db_comp/schemas/jl_db_comp/package.json.orig CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "jl_db_comp",
-   "version": "0.1.10",
+   "version": "0.1.11",
    "description": "A JupyterLab extension to complete db queries in jupyterlab notebooks",
    "keywords": [
      "jupyter",
jl_db_comp-0.1.11.data/data/share/jupyter/labextensions/jl_db_comp/static/171.c84e54337cfd177ebe22.js ADDED
@@ -0,0 +1 @@
+ "use strict";(self.webpackChunkjl_db_comp=self.webpackChunkjl_db_comp||[]).push([[171],{171(e,n,t){t.r(n),t.d(n,{default:()=>m});var o=t(141),s=t(931),i=t(249),a=t(125),r=t(526);async function c(e="",n={}){const t=a.ServerConnection.makeSettings(),o=r.URLExt.join(t.baseUrl,"jl-db-comp",e);let s;try{s=await a.ServerConnection.makeRequest(o,n,t)}catch(e){throw new a.ServerConnection.NetworkError(e)}let i=await s.text();if(i.length>0)try{i=JSON.parse(i)}catch(e){console.log("Not a JSON response body.",s)}if(!s.ok)throw new a.ServerConnection.ResponseError(s,i.message||i);return i}class l{constructor(e,n){this.identifier="jl_db_comp:postgres-completer",this.renderer=null,this._cache=new Map,this._cacheTTL=3e5,this._settings=null,this._notebookTracker=null,this._connectionName="",this._schema="public",this._enabled=!0,this._availableConnections=[],this._cachedKernelConfig=null,this._kernelConfigCacheTime=0,this._kernelConfigCacheTTL=3e4,this._sqlKeywords=["select","from","join","where","insert","update","delete","inner","left","right","outer","on","group","order","by","having","into","values","set"],this._notebookTracker=n||null,e&&(this._settings=e,this._loadSettings(),e.changed.connect(()=>{this._loadSettings()})),this._loadAvailableConnections()}_loadSettings(){this._settings&&(this._connectionName=this._settings.get("connectionName").composite,this._schema=this._settings.get("schema").composite,this._enabled=this._settings.get("enabled").composite)}async _loadAvailableConnections(){try{const e=await async function(){try{return await c("connections",{method:"GET"})}catch(e){if(e instanceof a.ServerConnection.ResponseError)console.error(`Failed to fetch connections: ${e.message}`);else{const n=e instanceof Error?e.message:"Unknown error";console.error(`Failed to fetch connections: ${n}`)}return{status:"error",connections:{},filePath:null,message:"Failed to fetch connections"}}}();"success"===e.status&&(this._availableConnections=Object.keys(e.connections))}catch(e){console.warn("Failed to load available connections:",e)}}async _getKernelConfig(){var e;const n=Date.now();if(this._cachedKernelConfig&&n-this._kernelConfigCacheTime<this._kernelConfigCacheTTL)return this._cachedKernelConfig;if(!this._notebookTracker)return null;const t=this._notebookTracker.currentWidget;if(!t)return null;const o=null===(e=t.sessionContext.session)||void 0===e?void 0:e.kernel;if(!o)return null;try{const e=o.requestExecute({code:"\nimport json\nimport os\nresult = {'connection': '', 'dsn_filename': ''}\n\n# Get active connection\ntry:\n from sql.connection import ConnectionManager\n conn = ConnectionManager.current\n if conn:\n for alias, c in ConnectionManager.connections.items():\n if c is conn:\n result['connection'] = alias\n break\nexcept:\n pass\n\n# Get dsn_filename from SqlMagic instance\ndsn_filename = None\ntry:\n from sql.magic import SqlMagic\n ip = get_ipython()\n if ip:\n for name, inst in ip.magics_manager.registry.items():\n if isinstance(inst, SqlMagic):\n dsn_filename = inst.dsn_filename\n break\nexcept:\n pass\n\n# Fallback: try to get from config\nif not dsn_filename:\n try:\n ip = get_ipython()\n if ip and hasattr(ip, 'config'):\n sql_config = ip.config.get('SqlMagic', {})\n if 'dsn_filename' in sql_config:\n dsn_filename = sql_config['dsn_filename']\n except:\n pass\n\n# Convert to absolute path if we have a dsn_filename\nif dsn_filename:\n if not os.path.isabs(dsn_filename):\n # Resolve relative to current working directory\n dsn_filename = os.path.abspath(dsn_filename)\n 
result['dsn_filename'] = dsn_filename\n\nprint(json.dumps(result))\n",silent:!0,store_history:!1}),t=await new Promise(n=>{let t="";e.onIOPub=e=>{if("stream"===e.header.msg_type){const n=e.content;"stdout"===n.name&&(t+=n.text)}},e.done.then(()=>{try{const e=JSON.parse(t.trim());n({connection:e.connection||"",dsnFilename:e.dsn_filename||""})}catch(e){n(null)}}).catch(()=>{n(null)})});return t&&(this._cachedKernelConfig=t,this._kernelConfigCacheTime=n),t}catch(e){return console.warn("Failed to get jupysql config from kernel:",e),null}}async isApplicable(e){if(!this._enabled)return!1;const n=e.editor;if(!n)return!1;const t=n.model.sharedModel.getSource();if(!t)return!1;const o=t.toLowerCase();return this._sqlKeywords.some(e=>o.includes(e))}async fetch(e,n){var t;if(!this._enabled)return{start:e.offset,end:e.offset,items:[]};const{text:o,offset:s}=e,i=this._extractContext(o,s);let r;if(i.jsonbColumn){const e=(null===(t=i.jsonbPath)||void 0===t?void 0:t.join("."))||"";r=`jsonb:${i.schemaOrTable?`${i.schemaOrTable}.`:""}${i.jsonbColumn}->${e}`.toLowerCase()}else r=i.schema&&i.tableName?`cols:${i.schema}.${i.tableName}`.toLowerCase():i.schemaOrTable?`sot:${i.schemaOrTable}`.toLowerCase():`tables:${this._schema}`.toLowerCase();const l=this._getCached(r);if(l){const n=this._filterByPrefix(l,i.prefix);return this._formatReply(n,e.offset,i.prefix)}try{const n=await this._getKernelConfig();let t,o=this._connectionName;n&&(n.dsnFilename&&(t=n.dsnFilename),!o&&n.connection&&(o=n.connection)),!o&&this._availableConnections.length>0&&(o=this._availableConnections[0]);const s=await async function(e,n="",t="public",o,s,i,r,l){try{const a=new URLSearchParams;e&&a.append("connection",e),l&&a.append("connections_file",l),n&&a.append("prefix",n),a.append("schema",t),o&&a.append("table",o),s&&a.append("schema_or_table",s),i&&(a.append("jsonb_column",i),r&&r.length>0&&a.append("jsonb_path",JSON.stringify(r)));const h=`completions?${a.toString()}`,m=await c(h,{method:"GET"});return"error"===m.status?(console.error("PostgreSQL completion error:",m.message),[]):i&&m.jsonbKeys?m.jsonbKeys:o||s?m.columns.length>0?m.columns:m.tables:[...m.tables,...m.columns]}catch(e){if(e instanceof a.ServerConnection.ResponseError){const n=e.response.status;let t=e.message;"string"==typeof t&&(t.includes("<!DOCTYPE")||t.includes("<html"))&&(t=`HTML error page (${t.substring(0,100)}...)`),console.error(`PostgreSQL completions API failed (${n}): ${t}`)}else{const n=e instanceof Error?e.message:"Unknown error";console.error(`PostgreSQL completions API failed: ${n}`)}return[]}}(o||void 0,"",i.schema||this._schema,i.tableName,i.schemaOrTable,i.jsonbColumn,i.jsonbPath,t);this._cache.set(r,{items:s,timestamp:Date.now()});const l=this._filterByPrefix(s,i.prefix);return this._formatReply(l,e.offset,i.prefix)}catch(n){return console.error("Failed to fetch PostgreSQL completions:",n),{start:e.offset,end:e.offset,items:[]}}}_extractContext(e,n){const t=e.substring(0,n);if(t.includes("->")){const e=e=>{const n=[],t=/['"]?([\w]+)['"]?\s*->/g;let o;for(;null!==(o=t.exec(e));)n.push(o[1]);const s=e.lastIndexOf("->");let i="";return i=s>=0?e.substring(s+2).trim().replace(/['"]/g,""):e.trim().replace(/['"]/g,""),{jsonbPath:n,prefix:i}},n=t.match(/([\w]+)\.([\w]+)\.([\w]+)\s*->\s*(.*)$/);if(n){const t=n[1],o=n[2],s=n[3],i=n[4],{jsonbPath:a,prefix:r}=e(i);return{schema:t,tableName:o,jsonbColumn:s,jsonbPath:a,prefix:r}}const o=t.match(/([\w]+)\.([\w]+)\s*->\s*(.*)$/);if(o){const 
n=o[1],t=o[2],s=o[3],{jsonbPath:i,prefix:a}=e(s);return{schemaOrTable:n,jsonbColumn:t,jsonbPath:i,prefix:a}}const s=t.match(/([\w]+)\s*->\s*(.*)$/);if(s){const n=s[1],t=s[2],{jsonbPath:o,prefix:i}=e(t);return{jsonbColumn:n,jsonbPath:o,prefix:i}}}const o=t.match(/([\w]+)\.([\w]+)\.([\w]*)$/);if(o)return{schema:o[1],tableName:o[2],prefix:o[3]};const s=t.match(/([\w]+)\.([\w]*)$/);if(s)return{schemaOrTable:s[1],prefix:s[2]};const i=t.match(/[\w]+$/),a=i?i[0]:"",r=e.toLowerCase().match(/\bfrom\s+([\w]+\.)?[\w]+/);if(r){const e=r[0].match(/\bfrom\s+(?:([\w]+)\.)?([\w]+)/);if(e){const n=e[1],t=e[2];return n?{schema:n,tableName:t,prefix:a}:{schemaOrTable:t,prefix:a}}}return{prefix:a}}_getCached(e){const n=e.toLowerCase(),t=this._cache.get(n);return t?Date.now()-t.timestamp>this._cacheTTL?(this._cache.delete(n),null):t.items:null}_formatReply(e,n,t){return{start:n-t.length,end:n,items:e.map(e=>{let n=e.name,t=e.name;"jsonb_key"===e.type&&(t=`'${e.name}'`),"column"===e.type&&e.table&&(n=`${e.name} (${e.table})`);let o,s="📊",i=e.name;return"table"===e.type?s="📋":"view"===e.type?s="👁️":"jsonb_key"===e.type&&(s="🔑",i=`0000${e.name}`),"column"===e.type&&e.dataType&&e.table?o=`${e.table}.${e.name}: ${e.dataType}`:"jsonb_key"===e.type&&e.keyPath&&(o=`JSONB key: ${e.keyPath.join(" -> ")}`),{label:`${s} ${n}`,insertText:t,sortText:i,type:e.type,documentation:o}})}}_filterByPrefix(e,n){if(!n)return e;const t=n.toLowerCase();return e.filter(e=>e.name.toLowerCase().startsWith(t))}clearCache(){this._cache.clear()}}const h="jl_db_comp:plugin",m={id:h,description:"A JupyterLab extension to complete db queries in jupyterlab notebooks",autoStart:!0,requires:[o.ICompletionProviderManager],optional:[i.ISettingRegistry,s.INotebookTracker],activate:(e,n,t,o)=>{let s;t?t.load(h).then(e=>{s=new l(e,o),n.registerProvider(s),console.log("JupyterLab extension jl_db_comp is activated!")}).catch(e=>{console.error("Failed to load settings for jl_db_comp:",e),s=new l(null,o),n.registerProvider(s),console.log("JupyterLab extension jl_db_comp is activated!")}):(s=new l(null,o),n.registerProvider(s),console.log("JupyterLab extension jl_db_comp is activated!"))}}}}]);
jl_db_comp-0.1.10.data/data/share/jupyter/labextensions/jl_db_comp/static/remoteEntry.e1b6bbc28eb414e4e599.js → jl_db_comp-0.1.11.data/data/share/jupyter/labextensions/jl_db_comp/static/remoteEntry.2f90ff333d0ac2cbb0a8.js RENAMED
@@ -1 +1 @@
- var _JUPYTERLAB;(()=>{"use strict";var e,r,t,n,o,a,i,u,l,s,f,c,d,p,h,v,b,g,m,y={246(e,r,t){var n={"./index":()=>t.e(171).then(()=>()=>t(171)),"./extension":()=>t.e(171).then(()=>()=>t(171)),"./style":()=>t.e(728).then(()=>()=>t(728))},o=(e,r)=>(t.R=r,r=t.o(n,e)?n[e]():Promise.resolve().then(()=>{throw new Error('Module "'+e+'" does not exist in container.')}),t.R=void 0,r),a=(e,r)=>{if(t.S){var n="default",o=t.S[n];if(o&&o!==e)throw new Error("Container initialization failed as it has already been initialized with a different share scope");return t.S[n]=e,t.I(n,r)}};t.d(r,{get:()=>o,init:()=>a})}},w={};function j(e){var r=w[e];if(void 0!==r)return r.exports;var t=w[e]={id:e,exports:{}};return y[e](t,t.exports,j),t.exports}j.m=y,j.c=w,j.n=e=>{var r=e&&e.__esModule?()=>e.default:()=>e;return j.d(r,{a:r}),r},j.d=(e,r)=>{for(var t in r)j.o(r,t)&&!j.o(e,t)&&Object.defineProperty(e,t,{enumerable:!0,get:r[t]})},j.f={},j.e=e=>Promise.all(Object.keys(j.f).reduce((r,t)=>(j.f[t](e,r),r),[])),j.u=e=>e+"."+{171:"545011db1d7843ce3ab3",728:"6552504d5b9b27551bc5"}[e]+".js?v="+{171:"545011db1d7843ce3ab3",728:"6552504d5b9b27551bc5"}[e],j.g=function(){if("object"==typeof globalThis)return globalThis;try{return this||new Function("return this")()}catch(e){if("object"==typeof window)return window}}(),j.o=(e,r)=>Object.prototype.hasOwnProperty.call(e,r),e={},r="jl_db_comp:",j.l=(t,n,o,a)=>{if(e[t])e[t].push(n);else{var i,u;if(void 0!==o)for(var l=document.getElementsByTagName("script"),s=0;s<l.length;s++){var f=l[s];if(f.getAttribute("src")==t||f.getAttribute("data-webpack")==r+o){i=f;break}}i||(u=!0,(i=document.createElement("script")).charset="utf-8",j.nc&&i.setAttribute("nonce",j.nc),i.setAttribute("data-webpack",r+o),i.src=t),e[t]=[n];var c=(r,n)=>{i.onerror=i.onload=null,clearTimeout(d);var o=e[t];if(delete e[t],i.parentNode&&i.parentNode.removeChild(i),o&&o.forEach(e=>e(n)),r)return r(n)},d=setTimeout(c.bind(null,void 0,{type:"timeout",target:i}),12e4);i.onerror=c.bind(null,i.onerror),i.onload=c.bind(null,i.onload),u&&document.head.appendChild(i)}},j.r=e=>{"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},(()=>{j.S={};var e={},r={};j.I=(t,n)=>{n||(n=[]);var o=r[t];if(o||(o=r[t]={}),!(n.indexOf(o)>=0)){if(n.push(o),e[t])return e[t];j.o(j.S,t)||(j.S[t]={});var a=j.S[t],i="jl_db_comp",u=[];return"default"===t&&((e,r,t,n)=>{var o=a[e]=a[e]||{},u=o[r];(!u||!u.loaded&&(1!=!u.eager?n:i>u.from))&&(o[r]={get:()=>j.e(171).then(()=>()=>j(171)),from:i,eager:!1})})("jl_db_comp","0.1.10"),e[t]=u.length?Promise.all(u).then(()=>e[t]=1):1}}})(),(()=>{var e;j.g.importScripts&&(e=j.g.location+"");var r=j.g.document;if(!e&&r&&(r.currentScript&&"SCRIPT"===r.currentScript.tagName.toUpperCase()&&(e=r.currentScript.src),!e)){var t=r.getElementsByTagName("script");if(t.length)for(var n=t.length-1;n>-1&&(!e||!/^http(s?):/.test(e));)e=t[n--].src}if(!e)throw new Error("Automatic publicPath is not supported in this browser");e=e.replace(/^blob:/,"").replace(/#.*$/,"").replace(/\?.*$/,"").replace(/\/[^\/]+$/,"/"),j.p=e})(),t=e=>{var r=e=>e.split(".").map(e=>+e==e?+e:e),t=/^([^-+]+)?(?:-([^+]+))?(?:\+(.+))?$/.exec(e),n=t[1]?r(t[1]):[];return t[2]&&(n.length++,n.push.apply(n,r(t[2]))),t[3]&&(n.push([]),n.push.apply(n,r(t[3]))),n},n=(e,r)=>{e=t(e),r=t(r);for(var n=0;;){if(n>=e.length)return n<r.length&&"u"!=(typeof r[n])[0];var o=e[n],a=(typeof o)[0];if(n>=r.length)return"u"==a;var i=r[n],u=(typeof 
i)[0];if(a!=u)return"o"==a&&"n"==u||"s"==u||"u"==a;if("o"!=a&&"u"!=a&&o!=i)return o<i;n++}},o=e=>{var r=e[0],t="";if(1===e.length)return"*";if(r+.5){t+=0==r?">=":-1==r?"<":1==r?"^":2==r?"~":r>0?"=":"!=";for(var n=1,a=1;a<e.length;a++)n--,t+="u"==(typeof(u=e[a]))[0]?"-":(n>0?".":"")+(n=2,u);return t}var i=[];for(a=1;a<e.length;a++){var u=e[a];i.push(0===u?"not("+l()+")":1===u?"("+l()+" || "+l()+")":2===u?i.pop()+" "+i.pop():o(u))}return l();function l(){return i.pop().replace(/^\((.+)\)$/,"$1")}},a=(e,r)=>{if(0 in e){r=t(r);var n=e[0],o=n<0;o&&(n=-n-1);for(var i=0,u=1,l=!0;;u++,i++){var s,f,c=u<e.length?(typeof e[u])[0]:"";if(i>=r.length||"o"==(f=(typeof(s=r[i]))[0]))return!l||("u"==c?u>n&&!o:""==c!=o);if("u"==f){if(!l||"u"!=c)return!1}else if(l)if(c==f)if(u<=n){if(s!=e[u])return!1}else{if(o?s>e[u]:s<e[u])return!1;s!=e[u]&&(l=!1)}else if("s"!=c&&"n"!=c){if(o||u<=n)return!1;l=!1,u--}else{if(u<=n||f<c!=o)return!1;l=!1}else"s"!=c&&"n"!=c&&(l=!1,u--)}}var d=[],p=d.pop.bind(d);for(i=1;i<e.length;i++){var h=e[i];d.push(1==h?p()|p():2==h?p()&p():h?a(h,r):!p())}return!!p()},i=(e,r)=>e&&j.o(e,r),u=e=>(e.loaded=1,e.get()),l=e=>Object.keys(e).reduce((r,t)=>(e[t].eager&&(r[t]=e[t]),r),{}),s=(e,r,t)=>{var o=t?l(e[r]):e[r];return Object.keys(o).reduce((e,r)=>!e||!o[e].loaded&&n(e,r)?r:e,0)},f=(e,r,t,n)=>"Unsatisfied version "+t+" from "+(t&&e[r][t].from)+" of shared singleton module "+r+" (required "+o(n)+")",c=e=>{throw new Error(e)},d=e=>{"undefined"!=typeof console&&console.warn&&console.warn(e)},p=(e,r,t)=>t?t():((e,r)=>c("Shared module "+r+" doesn't exist in shared scope "+e))(e,r),h=(e=>function(r,t,n,o,a){var i=j.I(r);return i&&i.then&&!n?i.then(e.bind(e,r,j.S[r],t,!1,o,a)):e(r,j.S[r],t,n,o,a)})((e,r,t,n,o,l)=>{if(!i(r,t))return p(e,t,l);var c=s(r,t,n);return a(o,c)||d(f(r,t,c,o)),u(r[t][c])}),v={},b={125:()=>h("default","@jupyterlab/services",!1,[1,7,5,3]),141:()=>h("default","@jupyterlab/completer",!1,[1,4,5,3]),249:()=>h("default","@jupyterlab/settingregistry",!1,[1,4,5,3]),526:()=>h("default","@jupyterlab/coreutils",!1,[1,6,5,3]),931:()=>h("default","@jupyterlab/notebook",!1,[1,4,5,3])},g={171:[125,141,249,526,931]},m={},j.f.consumes=(e,r)=>{j.o(g,e)&&g[e].forEach(e=>{if(j.o(v,e))return r.push(v[e]);if(!m[e]){var t=r=>{v[e]=0,j.m[e]=t=>{delete j.c[e],t.exports=r()}};m[e]=!0;var n=r=>{delete v[e],j.m[e]=t=>{throw delete j.c[e],r}};try{var o=b[e]();o.then?r.push(v[e]=o.then(t).catch(n)):t(o)}catch(e){n(e)}}})},(()=>{var e={248:0};j.f.j=(r,t)=>{var n=j.o(e,r)?e[r]:void 0;if(0!==n)if(n)t.push(n[2]);else{var o=new Promise((t,o)=>n=e[r]=[t,o]);t.push(n[2]=o);var a=j.p+j.u(r),i=new Error;j.l(a,t=>{if(j.o(e,r)&&(0!==(n=e[r])&&(e[r]=void 0),n)){var o=t&&("load"===t.type?"missing":t.type),a=t&&t.target&&t.target.src;i.message="Loading chunk "+r+" failed.\n("+o+": "+a+")",i.name="ChunkLoadError",i.type=o,i.request=a,n[1](i)}},"chunk-"+r,r)}};var r=(r,t)=>{var n,o,[a,i,u]=t,l=0;if(a.some(r=>0!==e[r])){for(n in i)j.o(i,n)&&(j.m[n]=i[n]);u&&u(j)}for(r&&r(t);l<a.length;l++)o=a[l],j.o(e,o)&&e[o]&&e[o][0](),e[o]=0},t=self.webpackChunkjl_db_comp=self.webpackChunkjl_db_comp||[];t.forEach(r.bind(null,0)),t.push=r.bind(null,t.push.bind(t))})(),j.nc=void 0;var S=j(246);(_JUPYTERLAB=void 0===_JUPYTERLAB?{}:_JUPYTERLAB).jl_db_comp=S})();
+ var _JUPYTERLAB;(()=>{"use strict";var e,r,t,n,o,a,i,u,l,f,s,d,c,p,h,v,b,g,m,y={246(e,r,t){var n={"./index":()=>t.e(171).then(()=>()=>t(171)),"./extension":()=>t.e(171).then(()=>()=>t(171)),"./style":()=>t.e(728).then(()=>()=>t(728))},o=(e,r)=>(t.R=r,r=t.o(n,e)?n[e]():Promise.resolve().then(()=>{throw new Error('Module "'+e+'" does not exist in container.')}),t.R=void 0,r),a=(e,r)=>{if(t.S){var n="default",o=t.S[n];if(o&&o!==e)throw new Error("Container initialization failed as it has already been initialized with a different share scope");return t.S[n]=e,t.I(n,r)}};t.d(r,{get:()=>o,init:()=>a})}},w={};function j(e){var r=w[e];if(void 0!==r)return r.exports;var t=w[e]={id:e,exports:{}};return y[e](t,t.exports,j),t.exports}j.m=y,j.c=w,j.n=e=>{var r=e&&e.__esModule?()=>e.default:()=>e;return j.d(r,{a:r}),r},j.d=(e,r)=>{for(var t in r)j.o(r,t)&&!j.o(e,t)&&Object.defineProperty(e,t,{enumerable:!0,get:r[t]})},j.f={},j.e=e=>Promise.all(Object.keys(j.f).reduce((r,t)=>(j.f[t](e,r),r),[])),j.u=e=>e+"."+{171:"c84e54337cfd177ebe22",728:"6552504d5b9b27551bc5"}[e]+".js?v="+{171:"c84e54337cfd177ebe22",728:"6552504d5b9b27551bc5"}[e],j.g=function(){if("object"==typeof globalThis)return globalThis;try{return this||new Function("return this")()}catch(e){if("object"==typeof window)return window}}(),j.o=(e,r)=>Object.prototype.hasOwnProperty.call(e,r),e={},r="jl_db_comp:",j.l=(t,n,o,a)=>{if(e[t])e[t].push(n);else{var i,u;if(void 0!==o)for(var l=document.getElementsByTagName("script"),f=0;f<l.length;f++){var s=l[f];if(s.getAttribute("src")==t||s.getAttribute("data-webpack")==r+o){i=s;break}}i||(u=!0,(i=document.createElement("script")).charset="utf-8",j.nc&&i.setAttribute("nonce",j.nc),i.setAttribute("data-webpack",r+o),i.src=t),e[t]=[n];var d=(r,n)=>{i.onerror=i.onload=null,clearTimeout(c);var o=e[t];if(delete e[t],i.parentNode&&i.parentNode.removeChild(i),o&&o.forEach(e=>e(n)),r)return r(n)},c=setTimeout(d.bind(null,void 0,{type:"timeout",target:i}),12e4);i.onerror=d.bind(null,i.onerror),i.onload=d.bind(null,i.onload),u&&document.head.appendChild(i)}},j.r=e=>{"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},(()=>{j.S={};var e={},r={};j.I=(t,n)=>{n||(n=[]);var o=r[t];if(o||(o=r[t]={}),!(n.indexOf(o)>=0)){if(n.push(o),e[t])return e[t];j.o(j.S,t)||(j.S[t]={});var a=j.S[t],i="jl_db_comp",u=[];return"default"===t&&((e,r,t,n)=>{var o=a[e]=a[e]||{},u=o[r];(!u||!u.loaded&&(1!=!u.eager?n:i>u.from))&&(o[r]={get:()=>j.e(171).then(()=>()=>j(171)),from:i,eager:!1})})("jl_db_comp","0.1.11"),e[t]=u.length?Promise.all(u).then(()=>e[t]=1):1}}})(),(()=>{var e;j.g.importScripts&&(e=j.g.location+"");var r=j.g.document;if(!e&&r&&(r.currentScript&&"SCRIPT"===r.currentScript.tagName.toUpperCase()&&(e=r.currentScript.src),!e)){var t=r.getElementsByTagName("script");if(t.length)for(var n=t.length-1;n>-1&&(!e||!/^http(s?):/.test(e));)e=t[n--].src}if(!e)throw new Error("Automatic publicPath is not supported in this browser");e=e.replace(/^blob:/,"").replace(/#.*$/,"").replace(/\?.*$/,"").replace(/\/[^\/]+$/,"/"),j.p=e})(),t=e=>{var r=e=>e.split(".").map(e=>+e==e?+e:e),t=/^([^-+]+)?(?:-([^+]+))?(?:\+(.+))?$/.exec(e),n=t[1]?r(t[1]):[];return t[2]&&(n.length++,n.push.apply(n,r(t[2]))),t[3]&&(n.push([]),n.push.apply(n,r(t[3]))),n},n=(e,r)=>{e=t(e),r=t(r);for(var n=0;;){if(n>=e.length)return n<r.length&&"u"!=(typeof r[n])[0];var o=e[n],a=(typeof o)[0];if(n>=r.length)return"u"==a;var i=r[n],u=(typeof 
i)[0];if(a!=u)return"o"==a&&"n"==u||"s"==u||"u"==a;if("o"!=a&&"u"!=a&&o!=i)return o<i;n++}},o=e=>{var r=e[0],t="";if(1===e.length)return"*";if(r+.5){t+=0==r?">=":-1==r?"<":1==r?"^":2==r?"~":r>0?"=":"!=";for(var n=1,a=1;a<e.length;a++)n--,t+="u"==(typeof(u=e[a]))[0]?"-":(n>0?".":"")+(n=2,u);return t}var i=[];for(a=1;a<e.length;a++){var u=e[a];i.push(0===u?"not("+l()+")":1===u?"("+l()+" || "+l()+")":2===u?i.pop()+" "+i.pop():o(u))}return l();function l(){return i.pop().replace(/^\((.+)\)$/,"$1")}},a=(e,r)=>{if(0 in e){r=t(r);var n=e[0],o=n<0;o&&(n=-n-1);for(var i=0,u=1,l=!0;;u++,i++){var f,s,d=u<e.length?(typeof e[u])[0]:"";if(i>=r.length||"o"==(s=(typeof(f=r[i]))[0]))return!l||("u"==d?u>n&&!o:""==d!=o);if("u"==s){if(!l||"u"!=d)return!1}else if(l)if(d==s)if(u<=n){if(f!=e[u])return!1}else{if(o?f>e[u]:f<e[u])return!1;f!=e[u]&&(l=!1)}else if("s"!=d&&"n"!=d){if(o||u<=n)return!1;l=!1,u--}else{if(u<=n||s<d!=o)return!1;l=!1}else"s"!=d&&"n"!=d&&(l=!1,u--)}}var c=[],p=c.pop.bind(c);for(i=1;i<e.length;i++){var h=e[i];c.push(1==h?p()|p():2==h?p()&p():h?a(h,r):!p())}return!!p()},i=(e,r)=>e&&j.o(e,r),u=e=>(e.loaded=1,e.get()),l=e=>Object.keys(e).reduce((r,t)=>(e[t].eager&&(r[t]=e[t]),r),{}),f=(e,r,t)=>{var o=t?l(e[r]):e[r];return Object.keys(o).reduce((e,r)=>!e||!o[e].loaded&&n(e,r)?r:e,0)},s=(e,r,t,n)=>"Unsatisfied version "+t+" from "+(t&&e[r][t].from)+" of shared singleton module "+r+" (required "+o(n)+")",d=e=>{throw new Error(e)},c=e=>{"undefined"!=typeof console&&console.warn&&console.warn(e)},p=(e,r,t)=>t?t():((e,r)=>d("Shared module "+r+" doesn't exist in shared scope "+e))(e,r),h=(e=>function(r,t,n,o,a){var i=j.I(r);return i&&i.then&&!n?i.then(e.bind(e,r,j.S[r],t,!1,o,a)):e(r,j.S[r],t,n,o,a)})((e,r,t,n,o,l)=>{if(!i(r,t))return p(e,t,l);var d=f(r,t,n);return a(o,d)||c(s(r,t,d,o)),u(r[t][d])}),v={},b={125:()=>h("default","@jupyterlab/services",!1,[1,7,5,3]),141:()=>h("default","@jupyterlab/completer",!1,[1,4,5,3]),249:()=>h("default","@jupyterlab/settingregistry",!1,[1,4,5,3]),526:()=>h("default","@jupyterlab/coreutils",!1,[1,6,5,3]),931:()=>h("default","@jupyterlab/notebook",!1,[1,4,5,3])},g={171:[125,141,249,526,931]},m={},j.f.consumes=(e,r)=>{j.o(g,e)&&g[e].forEach(e=>{if(j.o(v,e))return r.push(v[e]);if(!m[e]){var t=r=>{v[e]=0,j.m[e]=t=>{delete j.c[e],t.exports=r()}};m[e]=!0;var n=r=>{delete v[e],j.m[e]=t=>{throw delete j.c[e],r}};try{var o=b[e]();o.then?r.push(v[e]=o.then(t).catch(n)):t(o)}catch(e){n(e)}}})},(()=>{var e={248:0};j.f.j=(r,t)=>{var n=j.o(e,r)?e[r]:void 0;if(0!==n)if(n)t.push(n[2]);else{var o=new Promise((t,o)=>n=e[r]=[t,o]);t.push(n[2]=o);var a=j.p+j.u(r),i=new Error;j.l(a,t=>{if(j.o(e,r)&&(0!==(n=e[r])&&(e[r]=void 0),n)){var o=t&&("load"===t.type?"missing":t.type),a=t&&t.target&&t.target.src;i.message="Loading chunk "+r+" failed.\n("+o+": "+a+")",i.name="ChunkLoadError",i.type=o,i.request=a,n[1](i)}},"chunk-"+r,r)}};var r=(r,t)=>{var n,o,[a,i,u]=t,l=0;if(a.some(r=>0!==e[r])){for(n in i)j.o(i,n)&&(j.m[n]=i[n]);u&&u(j)}for(r&&r(t);l<a.length;l++)o=a[l],j.o(e,o)&&e[o]&&e[o][0](),e[o]=0},t=self.webpackChunkjl_db_comp=self.webpackChunkjl_db_comp||[];t.forEach(r.bind(null,0)),t.push=r.bind(null,t.push.bind(t))})(),j.nc=void 0;var S=j(246);(_JUPYTERLAB=void 0===_JUPYTERLAB?{}:_JUPYTERLAB).jl_db_comp=S})();
{jl_db_comp-0.1.10.dist-info → jl_db_comp-0.1.11.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: jl_db_comp
- Version: 0.1.10
+ Version: 0.1.11
  Summary: A JupyterLab extension to complete db queries in jupyterlab notebooks
  Project-URL: Homepage, https://github.com/Ben-Herz/jl_db_completer
  Project-URL: Bug Tracker, https://github.com/Ben-Herz/jl_db_completer/issues
jl_db_comp-0.1.11.dist-info/RECORD ADDED
@@ -0,0 +1,20 @@
+ jl_db_comp/__init__.py,sha256=L_Vn61SO_SScZsd1--0ug5RN35CibN9SH-yHBpGXR7g,1102
+ jl_db_comp/_version.py,sha256=YCUEZ43B30YYGak_DJCWxdpF9JOln9sgKvyOaT5e-ug,172
+ jl_db_comp/connections.py,sha256=kiNxyQzvd8FlebpCnCk2sFQxlm65L5T2BSZB1H-q7kQ,5222
+ jl_db_comp/routes.py,sha256=LU_6d519OFVt0mRYXUh3_jZ1n18fuE03y3BRK3sYY1w,29530
+ jl_db_comp/tests/__init__.py,sha256=mdd3JucOpGsN8PiHaypFnGLdv2nemscFn0piCZc2fls,40
+ jl_db_comp/tests/test_routes.py,sha256=gDoRBBtxKPkmXOCvnBeuN1w7SgpqSvrMc5YQrCIFAF4,1752
+ jl_db_comp-0.1.11.data/data/etc/jupyter/jupyter_server_config.d/jl_db_comp.json,sha256=fSmY5rlApxKaWIC5WmqvKJ2xW5lLmO_ybLJKogoJcq8,85
+ jl_db_comp-0.1.11.data/data/share/jupyter/labextensions/jl_db_comp/package.json,sha256=Y2ZEPpRa6jKGDLqhGhLN0zsTSOKQZoKJcCksvN7WTkk,6231
+ jl_db_comp-0.1.11.data/data/share/jupyter/labextensions/jl_db_comp/schemas/jl_db_comp/package.json.orig,sha256=QelQ1SJ0dKluvoNXCrDSkH5kmwmDQRiHiR_Of3F2rm4,7091
+ jl_db_comp-0.1.11.data/data/share/jupyter/labextensions/jl_db_comp/schemas/jl_db_comp/plugin.json,sha256=PM45pSfu6jtKNdgUDNumO5-YmQIdCAGO_0x_4Ec6ldg,963
+ jl_db_comp-0.1.11.data/data/share/jupyter/labextensions/jl_db_comp/static/171.c84e54337cfd177ebe22.js,sha256=yE5UM3z9F36-IkfeEp0-IZHIp4EXDzysa6rsDnuCM7I,9395
+ jl_db_comp-0.1.11.data/data/share/jupyter/labextensions/jl_db_comp/static/728.6552504d5b9b27551bc5.js,sha256=ZVJQTVubJ1UbxRj0sICSOtdLfqM4b9Qwf9HLhKXyhb4,4402
+ jl_db_comp-0.1.11.data/data/share/jupyter/labextensions/jl_db_comp/static/remoteEntry.2f90ff333d0ac2cbb0a8.js,sha256=L5D_Mz0KwsuwqOv2qtC0fJxjIbbUlKs2RYZgex6Brt8,6791
+ jl_db_comp-0.1.11.data/data/share/jupyter/labextensions/jl_db_comp/static/style.js,sha256=kHmCPOWdRY7r0Voabh7CMyxQJA6oPseEwKF6mcW--Zs,153
+ jl_db_comp-0.1.11.data/data/share/jupyter/labextensions/jl_db_comp/static/third-party-licenses.json,sha256=W6N2sSD7tQihMqQk64F9xMd1Flfr2KO97esAiHUOYdM,2453
+ jl_db_comp-0.1.11.data/data/share/jupyter/labextensions/jl_db_comp/install.json,sha256=AkhpkuSkRFqmwkttzIRdh8UApmBhvmiodyEPb9WylXc,181
+ jl_db_comp-0.1.11.dist-info/METADATA,sha256=zHWBuGjjxVdleyvCRVtNKO-JxtnLMyEOV1ZaeKdoVH0,17252
+ jl_db_comp-0.1.11.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+ jl_db_comp-0.1.11.dist-info/licenses/LICENSE,sha256=Oz0oICGKssavDqCu6-PnQedLBaqF1DaZvDyJsGCSIzE,1523
+ jl_db_comp-0.1.11.dist-info/RECORD,,
jl_db_comp-0.1.10.data/data/share/jupyter/labextensions/jl_db_comp/static/171.545011db1d7843ce3ab3.js DELETED
@@ -1 +0,0 @@
- "use strict";(self.webpackChunkjl_db_comp=self.webpackChunkjl_db_comp||[]).push([[171],{171(e,n,t){t.r(n),t.d(n,{default:()=>m});var o=t(141),s=t(931),i=t(249),a=t(125),r=t(526);async function c(e="",n={}){const t=a.ServerConnection.makeSettings(),o=r.URLExt.join(t.baseUrl,"jl-db-comp",e);let s;try{s=await a.ServerConnection.makeRequest(o,n,t)}catch(e){throw new a.ServerConnection.NetworkError(e)}let i=await s.text();if(i.length>0)try{i=JSON.parse(i)}catch(e){console.log("Not a JSON response body.",s)}if(!s.ok)throw new a.ServerConnection.ResponseError(s,i.message||i);return i}class l{constructor(e,n){this.identifier="jl_db_comp:postgres-completer",this.renderer=null,this._cache=new Map,this._cacheTTL=3e5,this._settings=null,this._notebookTracker=null,this._connectionName="",this._schema="public",this._enabled=!0,this._availableConnections=[],this._cachedKernelConfig=null,this._kernelConfigCacheTime=0,this._kernelConfigCacheTTL=3e4,this._sqlKeywords=["select","from","join","where","insert","update","delete","inner","left","right","outer","on","group","order","by","having","into","values","set"],this._notebookTracker=n||null,e&&(this._settings=e,this._loadSettings(),e.changed.connect(()=>{this._loadSettings()})),this._loadAvailableConnections()}_loadSettings(){this._settings&&(this._connectionName=this._settings.get("connectionName").composite,this._schema=this._settings.get("schema").composite,this._enabled=this._settings.get("enabled").composite)}async _loadAvailableConnections(){try{const e=await async function(){try{return await c("connections",{method:"GET"})}catch(e){if(e instanceof a.ServerConnection.ResponseError)console.error(`Failed to fetch connections: ${e.message}`);else{const n=e instanceof Error?e.message:"Unknown error";console.error(`Failed to fetch connections: ${n}`)}return{status:"error",connections:{},filePath:null,message:"Failed to fetch connections"}}}();"success"===e.status&&(this._availableConnections=Object.keys(e.connections))}catch(e){console.warn("Failed to load available connections:",e)}}async _getKernelConfig(){var e;const n=Date.now();if(this._cachedKernelConfig&&n-this._kernelConfigCacheTime<this._kernelConfigCacheTTL)return this._cachedKernelConfig;if(!this._notebookTracker)return null;const t=this._notebookTracker.currentWidget;if(!t)return null;const o=null===(e=t.sessionContext.session)||void 0===e?void 0:e.kernel;if(!o)return null;try{const e=o.requestExecute({code:"\nimport json\nimport os\nresult = {'connection': '', 'dsn_filename': ''}\n\n# Get active connection\ntry:\n from sql.connection import ConnectionManager\n conn = ConnectionManager.current\n if conn:\n for alias, c in ConnectionManager.connections.items():\n if c is conn:\n result['connection'] = alias\n break\nexcept:\n pass\n\n# Get dsn_filename from SqlMagic instance\ndsn_filename = None\ntry:\n from sql.magic import SqlMagic\n ip = get_ipython()\n if ip:\n for name, inst in ip.magics_manager.registry.items():\n if isinstance(inst, SqlMagic):\n dsn_filename = inst.dsn_filename\n break\nexcept:\n pass\n\n# Fallback: try to get from config\nif not dsn_filename:\n try:\n ip = get_ipython()\n if ip and hasattr(ip, 'config'):\n sql_config = ip.config.get('SqlMagic', {})\n if 'dsn_filename' in sql_config:\n dsn_filename = sql_config['dsn_filename']\n except:\n pass\n\n# Convert to absolute path if we have a dsn_filename\nif dsn_filename:\n if not os.path.isabs(dsn_filename):\n # Resolve relative to current working directory\n dsn_filename = os.path.abspath(dsn_filename)\n 
result['dsn_filename'] = dsn_filename\n\nprint(json.dumps(result))\n",silent:!0,store_history:!1}),t=await new Promise(n=>{let t="";e.onIOPub=e=>{if("stream"===e.header.msg_type){const n=e.content;"stdout"===n.name&&(t+=n.text)}},e.done.then(()=>{try{const e=JSON.parse(t.trim());n({connection:e.connection||"",dsnFilename:e.dsn_filename||""})}catch(e){n(null)}}).catch(()=>{n(null)})});return t&&(this._cachedKernelConfig=t,this._kernelConfigCacheTime=n),t}catch(e){return console.warn("Failed to get jupysql config from kernel:",e),null}}async isApplicable(e){if(!this._enabled)return!1;const n=e.editor;if(!n)return!1;const t=n.model.sharedModel.getSource();if(!t)return!1;const o=t.toLowerCase();return this._sqlKeywords.some(e=>o.includes(e))}async fetch(e,n){var t;if(!this._enabled)return{start:e.offset,end:e.offset,items:[]};const{text:o,offset:s}=e,i=this._extractContext(o,s);let r;if(i.jsonbColumn){const e=(null===(t=i.jsonbPath)||void 0===t?void 0:t.join("."))||"";r=`${i.schemaOrTable?`${i.schemaOrTable}.`:""}${i.jsonbColumn}->${e}.${i.prefix}`.toLowerCase()}else r=i.schema&&i.tableName?`${i.schema}.${i.tableName}.${i.prefix}`.toLowerCase():i.schemaOrTable?`${i.schemaOrTable}.${i.prefix}`.toLowerCase():i.prefix.toLowerCase();const l=this._getCached(r);if(l)return this._formatReply(l,e.offset,i.prefix);try{const n=await this._getKernelConfig();let t,o=this._connectionName;n&&(n.dsnFilename&&(t=n.dsnFilename),!o&&n.connection&&(o=n.connection)),!o&&this._availableConnections.length>0&&(o=this._availableConnections[0]);const s=await async function(e,n="",t="public",o,s,i,r,l){try{const a=new URLSearchParams;e&&a.append("connection",e),l&&a.append("connections_file",l),n&&a.append("prefix",n),a.append("schema",t),o&&a.append("table",o),s&&a.append("schema_or_table",s),i&&(a.append("jsonb_column",i),r&&r.length>0&&a.append("jsonb_path",JSON.stringify(r)));const h=`completions?${a.toString()}`,m=await c(h,{method:"GET"});return"error"===m.status?(console.error("PostgreSQL completion error:",m.message),[]):i&&m.jsonbKeys?m.jsonbKeys:o||s?m.columns.length>0?m.columns:m.tables:[...m.tables,...m.columns]}catch(e){if(e instanceof a.ServerConnection.ResponseError){const n=e.response.status;let t=e.message;"string"==typeof t&&(t.includes("<!DOCTYPE")||t.includes("<html"))&&(t=`HTML error page (${t.substring(0,100)}...)`),console.error(`PostgreSQL completions API failed (${n}): ${t}`)}else{const n=e instanceof Error?e.message:"Unknown error";console.error(`PostgreSQL completions API failed: ${n}`)}return[]}}(o||void 0,i.prefix,i.schema||this._schema,i.tableName,i.schemaOrTable,i.jsonbColumn,i.jsonbPath,t);return this._cache.set(r,{items:s,timestamp:Date.now()}),this._formatReply(s,e.offset,i.prefix)}catch(n){return console.error("Failed to fetch PostgreSQL completions:",n),{start:e.offset,end:e.offset,items:[]}}}_extractContext(e,n){const t=e.substring(0,n);if(t.includes("->")){const e=e=>{const n=[],t=/['"]?([\w]+)['"]?\s*->/g;let o;for(;null!==(o=t.exec(e));)n.push(o[1]);const s=e.lastIndexOf("->");let i="";return i=s>=0?e.substring(s+2).trim().replace(/['"]/g,""):e.trim().replace(/['"]/g,""),{jsonbPath:n,prefix:i}},n=t.match(/([\w]+)\.([\w]+)\.([\w]+)\s*->\s*(.*)$/);if(n){const t=n[1],o=n[2],s=n[3],i=n[4],{jsonbPath:a,prefix:r}=e(i);return{schema:t,tableName:o,jsonbColumn:s,jsonbPath:a,prefix:r}}const o=t.match(/([\w]+)\.([\w]+)\s*->\s*(.*)$/);if(o){const n=o[1],t=o[2],s=o[3],{jsonbPath:i,prefix:a}=e(s);return{schemaOrTable:n,jsonbColumn:t,jsonbPath:i,prefix:a}}const 
s=t.match(/([\w]+)\s*->\s*(.*)$/);if(s){const n=s[1],t=s[2],{jsonbPath:o,prefix:i}=e(t);return{jsonbColumn:n,jsonbPath:o,prefix:i}}}const o=t.match(/([\w]+)\.([\w]+)\.([\w]*)$/);if(o)return{schema:o[1],tableName:o[2],prefix:o[3]};const s=t.match(/([\w]+)\.([\w]*)$/);if(s)return{schemaOrTable:s[1],prefix:s[2]};const i=t.match(/[\w]+$/),a=i?i[0]:"",r=e.toLowerCase().match(/\bfrom\s+([\w]+\.)?[\w]+/);if(r){const e=r[0].match(/\bfrom\s+(?:([\w]+)\.)?([\w]+)/);if(e){const n=e[1],t=e[2];return n?{schema:n,tableName:t,prefix:a}:{schemaOrTable:t,prefix:a}}}return{prefix:a}}_getCached(e){const n=e.toLowerCase(),t=this._cache.get(n);return t?Date.now()-t.timestamp>this._cacheTTL?(this._cache.delete(n),null):t.items:null}_formatReply(e,n,t){return{start:n-t.length,end:n,items:e.map(e=>{let n=e.name,t=e.name;"jsonb_key"===e.type&&(t=`'${e.name}'`),"column"===e.type&&e.table&&(n=`${e.name} (${e.table})`);let o,s="📊",i=e.name;return"table"===e.type?s="📋":"view"===e.type?s="👁️":"jsonb_key"===e.type&&(s="🔑",i=`0000${e.name}`),"column"===e.type&&e.dataType&&e.table?o=`${e.table}.${e.name}: ${e.dataType}`:"jsonb_key"===e.type&&e.keyPath&&(o=`JSONB key: ${e.keyPath.join(" -> ")}`),{label:`${s} ${n}`,insertText:t,sortText:i,type:e.type,documentation:o}})}}clearCache(){this._cache.clear()}}const h="jl_db_comp:plugin",m={id:h,description:"A JupyterLab extension to complete db queries in jupyterlab notebooks",autoStart:!0,requires:[o.ICompletionProviderManager],optional:[i.ISettingRegistry,s.INotebookTracker],activate:(e,n,t,o)=>{let s;t?t.load(h).then(e=>{s=new l(e,o),n.registerProvider(s),console.log("JupyterLab extension jl_db_comp is activated!")}).catch(e=>{console.error("Failed to load settings for jl_db_comp:",e),s=new l(null,o),n.registerProvider(s),console.log("JupyterLab extension jl_db_comp is activated!")}):(s=new l(null,o),n.registerProvider(s),console.log("JupyterLab extension jl_db_comp is activated!"))}}}}]);
@@ -1,20 +0,0 @@
1
- jl_db_comp/__init__.py,sha256=L_Vn61SO_SScZsd1--0ug5RN35CibN9SH-yHBpGXR7g,1102
2
- jl_db_comp/_version.py,sha256=aM0QVK4NJP1CO1SzDAG0tn7KvEzshOfEQ6UeJq1gd_Q,172
3
- jl_db_comp/connections.py,sha256=kiNxyQzvd8FlebpCnCk2sFQxlm65L5T2BSZB1H-q7kQ,5222
4
- jl_db_comp/routes.py,sha256=5u3w5THejkXT9-gz-qKQmJPkDJAVyU2aE0mq_nIn7sE,25798
5
- jl_db_comp/tests/__init__.py,sha256=mdd3JucOpGsN8PiHaypFnGLdv2nemscFn0piCZc2fls,40
6
- jl_db_comp/tests/test_routes.py,sha256=gDoRBBtxKPkmXOCvnBeuN1w7SgpqSvrMc5YQrCIFAF4,1752
7
- jl_db_comp-0.1.10.data/data/etc/jupyter/jupyter_server_config.d/jl_db_comp.json,sha256=fSmY5rlApxKaWIC5WmqvKJ2xW5lLmO_ybLJKogoJcq8,85
8
- jl_db_comp-0.1.10.data/data/share/jupyter/labextensions/jl_db_comp/package.json,sha256=paDyPeBJsp5c0Ai3gasecQUIYca-qiQQ_HmYIaZl2J8,6231
9
- jl_db_comp-0.1.10.data/data/share/jupyter/labextensions/jl_db_comp/schemas/jl_db_comp/package.json.orig,sha256=NkfTFsQezToYG0wniQXQhs-RxvQOO0hYP2Xx45167Y0,7091
10
- jl_db_comp-0.1.10.data/data/share/jupyter/labextensions/jl_db_comp/schemas/jl_db_comp/plugin.json,sha256=PM45pSfu6jtKNdgUDNumO5-YmQIdCAGO_0x_4Ec6ldg,963
11
- jl_db_comp-0.1.10.data/data/share/jupyter/labextensions/jl_db_comp/static/171.545011db1d7843ce3ab3.js,sha256=VFAR2x14Q846s4kdKlyFcZ2TteUdOQBEEg5PhHYVAEk,9208
12
- jl_db_comp-0.1.10.data/data/share/jupyter/labextensions/jl_db_comp/static/728.6552504d5b9b27551bc5.js,sha256=ZVJQTVubJ1UbxRj0sICSOtdLfqM4b9Qwf9HLhKXyhb4,4402
13
- jl_db_comp-0.1.10.data/data/share/jupyter/labextensions/jl_db_comp/static/remoteEntry.e1b6bbc28eb414e4e599.js,sha256=4ba7wo60FOTlmTdZOb0PCKd-MxRqOkleptMIRmQPFXs,6791
14
- jl_db_comp-0.1.10.data/data/share/jupyter/labextensions/jl_db_comp/static/style.js,sha256=kHmCPOWdRY7r0Voabh7CMyxQJA6oPseEwKF6mcW--Zs,153
15
- jl_db_comp-0.1.10.data/data/share/jupyter/labextensions/jl_db_comp/static/third-party-licenses.json,sha256=W6N2sSD7tQihMqQk64F9xMd1Flfr2KO97esAiHUOYdM,2453
16
- jl_db_comp-0.1.10.data/data/share/jupyter/labextensions/jl_db_comp/install.json,sha256=AkhpkuSkRFqmwkttzIRdh8UApmBhvmiodyEPb9WylXc,181
17
- jl_db_comp-0.1.10.dist-info/METADATA,sha256=AyqolcEdJ8u6q2AKCKVb8imfxwqd8yBwc81NfqyhVyU,17252
18
- jl_db_comp-0.1.10.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
19
- jl_db_comp-0.1.10.dist-info/licenses/LICENSE,sha256=Oz0oICGKssavDqCu6-PnQedLBaqF1DaZvDyJsGCSIzE,1523
20
- jl_db_comp-0.1.10.dist-info/RECORD,,