jl_db_comp-0.1.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (33)
  1. jl_db_comp/__init__.py +36 -0
  2. jl_db_comp/_version.py +4 -0
  3. jl_db_comp/labextension/build_log.json +728 -0
  4. jl_db_comp/labextension/package.json +219 -0
  5. jl_db_comp/labextension/schemas/jl_db_comp/package.json.orig +214 -0
  6. jl_db_comp/labextension/schemas/jl_db_comp/plugin.json +27 -0
  7. jl_db_comp/labextension/static/lib_index_js.a0969ed73da70f2cc451.js +561 -0
  8. jl_db_comp/labextension/static/lib_index_js.a0969ed73da70f2cc451.js.map +1 -0
  9. jl_db_comp/labextension/static/remoteEntry.5763ae02737e035e938c.js +560 -0
  10. jl_db_comp/labextension/static/remoteEntry.5763ae02737e035e938c.js.map +1 -0
  11. jl_db_comp/labextension/static/style.js +4 -0
  12. jl_db_comp/labextension/static/style_index_js.5364c7419a6b9db5d727.js +508 -0
  13. jl_db_comp/labextension/static/style_index_js.5364c7419a6b9db5d727.js.map +1 -0
  14. jl_db_comp/routes.py +332 -0
  15. jl_db_comp/tests/__init__.py +1 -0
  16. jl_db_comp/tests/test_routes.py +49 -0
  17. jl_db_comp-0.1.0.data/data/etc/jupyter/jupyter_server_config.d/jl_db_comp.json +7 -0
  18. jl_db_comp-0.1.0.data/data/share/jupyter/labextensions/jl_db_comp/build_log.json +728 -0
  19. jl_db_comp-0.1.0.data/data/share/jupyter/labextensions/jl_db_comp/install.json +5 -0
  20. jl_db_comp-0.1.0.data/data/share/jupyter/labextensions/jl_db_comp/package.json +219 -0
  21. jl_db_comp-0.1.0.data/data/share/jupyter/labextensions/jl_db_comp/schemas/jl_db_comp/package.json.orig +214 -0
  22. jl_db_comp-0.1.0.data/data/share/jupyter/labextensions/jl_db_comp/schemas/jl_db_comp/plugin.json +27 -0
  23. jl_db_comp-0.1.0.data/data/share/jupyter/labextensions/jl_db_comp/static/lib_index_js.a0969ed73da70f2cc451.js +561 -0
  24. jl_db_comp-0.1.0.data/data/share/jupyter/labextensions/jl_db_comp/static/lib_index_js.a0969ed73da70f2cc451.js.map +1 -0
  25. jl_db_comp-0.1.0.data/data/share/jupyter/labextensions/jl_db_comp/static/remoteEntry.5763ae02737e035e938c.js +560 -0
  26. jl_db_comp-0.1.0.data/data/share/jupyter/labextensions/jl_db_comp/static/remoteEntry.5763ae02737e035e938c.js.map +1 -0
  27. jl_db_comp-0.1.0.data/data/share/jupyter/labextensions/jl_db_comp/static/style.js +4 -0
  28. jl_db_comp-0.1.0.data/data/share/jupyter/labextensions/jl_db_comp/static/style_index_js.5364c7419a6b9db5d727.js +508 -0
  29. jl_db_comp-0.1.0.data/data/share/jupyter/labextensions/jl_db_comp/static/style_index_js.5364c7419a6b9db5d727.js.map +1 -0
  30. jl_db_comp-0.1.0.dist-info/METADATA +440 -0
  31. jl_db_comp-0.1.0.dist-info/RECORD +33 -0
  32. jl_db_comp-0.1.0.dist-info/WHEEL +4 -0
  33. jl_db_comp-0.1.0.dist-info/licenses/LICENSE +29 -0
jl_db_comp/routes.py ADDED
@@ -0,0 +1,332 @@
+ import json
+ import os
+ from urllib.parse import unquote
+
+ from jupyter_server.base.handlers import APIHandler
+ from jupyter_server.utils import url_path_join
+ import tornado
+
+ try:
+     import psycopg2
+     PSYCOPG2_AVAILABLE = True
+ except ImportError:
+     PSYCOPG2_AVAILABLE = False
+
+
+ class PostgresCompletionsHandler(APIHandler):
+     """Handler for fetching PostgreSQL table and column completions."""
+
+     @tornado.web.authenticated
+     def get(self):
+         """Fetch completions from PostgreSQL database.
+
+         Query parameters:
+         - db_url: URL-encoded PostgreSQL connection string
+         - prefix: Optional prefix to filter results
+         - schema: Database schema (default: 'public')
+         - table: Optional table name to filter columns (only returns columns from this table)
+         - schema_or_table: Ambiguous identifier - backend determines if it's a schema or table
+         """
+         if not PSYCOPG2_AVAILABLE:
+             self.set_status(500)
+             self.finish(json.dumps({
+                 "status": "error",
+                 "message": "psycopg2 is not installed. Install with: pip install psycopg2-binary"
+             }))
+             return
+
+         try:
+             db_url = self.get_argument('db_url', None)
+             prefix = self.get_argument('prefix', '').lower()
+             schema = self.get_argument('schema', 'public')
+             table = self.get_argument('table', None)
+             schema_or_table = self.get_argument('schema_or_table', None)
+             jsonb_column = self.get_argument('jsonb_column', None)
+             jsonb_path_str = self.get_argument('jsonb_path', None)
+
+             if not db_url:
+                 db_url = os.environ.get('POSTGRES_URL')
+             else:
+                 db_url = unquote(db_url)
+
+             if not db_url:
+                 self.finish(json.dumps({
+                     "status": "success",
+                     "tables": [],
+                     "columns": [],
+                     "jsonbKeys": [],
+                     "message": "No database URL provided"
+                 }))
+                 return
+
+             # Parse JSON path if provided
+             jsonb_path = None
+             if jsonb_path_str:
+                 try:
+                     jsonb_path = json.loads(jsonb_path_str)
+                 except json.JSONDecodeError:
+                     jsonb_path = []
+
+             completions = self._fetch_completions(
+                 db_url, schema, prefix, table, schema_or_table, jsonb_column, jsonb_path
+             )
+             self.finish(json.dumps(completions))
+
+         except psycopg2.Error as e:
+             error_msg = str(e).split('\n')[0]
+             self.log.error(f"PostgreSQL error: {error_msg}")
+             self.set_status(500)
+             self.finish(json.dumps({
+                 "status": "error",
+                 "message": f"Database error: {error_msg}",
+                 "tables": [],
+                 "columns": []
+             }))
+         except Exception as e:
+             error_msg = str(e)
+             self.log.error(f"Completion handler error: {error_msg}")
+             self.set_status(500)
+             self.finish(json.dumps({
+                 "status": "error",
+                 "message": f"Server error: {error_msg}",
+                 "tables": [],
+                 "columns": []
+             }))
+
+     def _fetch_completions(
+         self,
+         db_url: str,
+         schema: str,
+         prefix: str,
+         table: str = None,
+         schema_or_table: str = None,
+         jsonb_column: str = None,
+         jsonb_path: list = None
+     ) -> dict:
+         """Fetch table and column names from PostgreSQL.
+
+         Args:
+             db_url: PostgreSQL connection string
+             schema: Database schema name
+             prefix: Filter prefix (case-insensitive)
+             table: Optional table name to filter columns (only returns columns from this table)
+             schema_or_table: Ambiguous identifier - determine if it's a schema or table
+             jsonb_column: Optional JSONB column to extract keys from
+             jsonb_path: Optional path for nested JSONB key extraction
+
+         Returns:
+             Dictionary with tables, columns, and jsonbKeys arrays
+         """
+         conn = None
+         try:
+             conn = psycopg2.connect(db_url)
+             cursor = conn.cursor()
+
+             tables = []
+             columns = []
+             jsonb_keys = []
+
+             # Handle JSONB key extraction
+             if jsonb_column:
+                 jsonb_keys = self._fetch_jsonb_keys(
+                     cursor, schema, schema_or_table, jsonb_column, jsonb_path, prefix
+                 )
+                 cursor.close()
+                 return {
+                     "status": "success",
+                     "tables": [],
+                     "columns": [],
+                     "jsonbKeys": jsonb_keys
+                 }
+
+             # Handle schema_or_table: check if it's a schema first, then try as table
+             if schema_or_table:
+                 # First, check if it's a schema
+                 cursor.execute("""
+                     SELECT schema_name
+                     FROM information_schema.schemata
+                     WHERE LOWER(schema_name) = %s
+                 """, (schema_or_table.lower(),))
+
+                 is_schema = cursor.fetchone() is not None
+
+                 if is_schema:
+                     # It's a schema - fetch tables and views from that schema
+                     cursor.execute("""
+                         SELECT table_name, table_type
+                         FROM information_schema.tables
+                         WHERE table_schema = %s
+                         AND table_type IN ('BASE TABLE', 'VIEW')
+                         AND LOWER(table_name) LIKE %s
+                         ORDER BY table_name
+                     """, (schema_or_table, f"{prefix}%"))
+
+                     tables = [
+                         {
+                             "name": row[0],
+                             "type": "view" if row[1] == 'VIEW' else "table"
+                         }
+                         for row in cursor.fetchall()
+                     ]
+                 else:
+                     # Not a schema - treat as table name, fetch columns from default schema
+                     cursor.execute("""
+                         SELECT table_name, column_name, data_type
+                         FROM information_schema.columns
+                         WHERE table_schema = %s
+                         AND LOWER(table_name) = %s
+                         AND LOWER(column_name) LIKE %s
+                         ORDER BY ordinal_position
+                     """, (schema, schema_or_table.lower(), f"{prefix}%"))
+
+                     columns = [
+                         {
+                             "name": row[1],
+                             "table": row[0],
+                             "dataType": row[2],
+                             "type": "column"
+                         }
+                         for row in cursor.fetchall()
+                     ]
+
+             # If table is specified with explicit schema, fetch columns from that table
+             elif table:
+                 cursor.execute("""
+                     SELECT table_name, column_name, data_type
+                     FROM information_schema.columns
+                     WHERE table_schema = %s
+                     AND LOWER(table_name) = %s
+                     AND LOWER(column_name) LIKE %s
+                     ORDER BY ordinal_position
+                 """, (schema, table.lower(), f"{prefix}%"))
+
+                 columns = [
+                     {
+                         "name": row[1],
+                         "table": row[0],
+                         "dataType": row[2],
+                         "type": "column"
+                     }
+                     for row in cursor.fetchall()
+                 ]
+             else:
+                 # No table or schema_or_table specified - fetch tables and views from default schema
+                 cursor.execute("""
+                     SELECT table_name, table_type
+                     FROM information_schema.tables
+                     WHERE table_schema = %s
+                     AND table_type IN ('BASE TABLE', 'VIEW')
+                     AND LOWER(table_name) LIKE %s
+                     ORDER BY table_name
+                 """, (schema, f"{prefix}%"))
+
+                 tables = [
+                     {
+                         "name": row[0],
+                         "type": "view" if row[1] == 'VIEW' else "table"
+                     }
+                     for row in cursor.fetchall()
+                 ]
+
+             cursor.close()
+
+             return {
+                 "status": "success",
+                 "tables": tables,
+                 "columns": columns
+             }
+
+         finally:
+             if conn:
+                 conn.close()
+
+     def _fetch_jsonb_keys(
+         self,
+         cursor,
+         schema: str,
+         table_name: str,
+         jsonb_column: str,
+         jsonb_path: list = None,
+         prefix: str = ''
+     ) -> list:
+         """Extract unique JSONB keys from a column in a table.
+
+         Args:
+             cursor: Database cursor
+             schema: Database schema
+             table_name: Table containing the JSONB column (can be None)
+             jsonb_column: Name of the JSONB column
+             jsonb_path: Optional path for nested keys (e.g., ['user', 'profile'])
+             prefix: Filter prefix for keys
+
+         Returns:
+             List of JSONB key completion items
+         """
+         try:
+             # If no table specified, find tables with this JSONB column
+             if not table_name:
+                 cursor.execute("""
+                     SELECT table_name
+                     FROM information_schema.columns
+                     WHERE table_schema = %s
+                     AND LOWER(column_name) = %s
+                     AND data_type = 'jsonb'
+                     LIMIT 1
+                 """, (schema, jsonb_column.lower()))
+
+                 result = cursor.fetchone()
+                 if not result:
+                     return []
+
+                 table_name = result[0]
+
+             # Build the JSONB path expression
+             if jsonb_path and len(jsonb_path) > 0:
+                 # For nested paths: column->>'key1'->>'key2'
+                 path_expr = jsonb_column
+                 for key in jsonb_path:
+                     path_expr = f"{path_expr}->'{key}'"
+             else:
+                 # For top-level keys: just the column
+                 path_expr = jsonb_column
+
+             # Query to extract unique keys
+             # LIMIT to 1000 rows for performance (sample the table)
+             query = f"""
+                 SELECT DISTINCT jsonb_object_keys({path_expr})
+                 FROM {schema}.{table_name}
+                 WHERE {path_expr} IS NOT NULL
+                 AND jsonb_typeof({path_expr}) = 'object'
+                 LIMIT 1000
+             """
+
+             cursor.execute(query)
+             keys = cursor.fetchall()
+
+             # Filter by prefix and format results
+             result = []
+             for row in keys:
+                 key = row[0]
+                 if key.lower().startswith(prefix):
+                     result.append({
+                         "name": key,
+                         "type": "jsonb_key",
+                         "keyPath": (jsonb_path or []) + [key]
+                     })
+
+             return result
+
+         except psycopg2.Error as e:
+             self.log.error(f"JSONB key extraction error: {str(e).split(chr(10))[0]}")
+             return []
+
+
+ def setup_route_handlers(web_app):
+     """Register route handlers with the Jupyter server."""
+     host_pattern = ".*$"
+     base_url = web_app.settings["base_url"]
+
+     completions_route = url_path_join(base_url, "jl-db-comp", "completions")
+     handlers = [(completions_route, PostgresCompletionsHandler)]
+
+     web_app.add_handlers(host_pattern, handlers)
jl_db_comp/tests/__init__.py ADDED
@@ -0,0 +1 @@
+ """Python unit tests for jl_db_comp."""
jl_db_comp/tests/test_routes.py ADDED
@@ -0,0 +1,49 @@
+ import json
+
+ import pytest
+ from tornado.httpclient import HTTPClientError
+
+
+ async def test_completions_no_db_url(jp_fetch):
+     """Test completions endpoint without database URL returns empty results."""
+     # When - fetch completions without db_url parameter
+     response = await jp_fetch("jl-db-comp", "completions")
+
+     # Then
+     assert response.code == 200
+     payload = json.loads(response.body)
+     assert payload["status"] == "success"
+     assert payload["tables"] == []
+     assert payload["columns"] == []
+     assert "No database URL provided" in payload.get("message", "")
+
+
+ async def test_completions_with_invalid_db_url(jp_fetch):
+     """Test completions endpoint with invalid database URL handles errors gracefully."""
+     # When - fetch completions with invalid db_url
+     with pytest.raises(HTTPClientError) as exc_info:
+         await jp_fetch(
+             "jl-db-comp", "completions", params={"db_url": "postgresql://invalid:url"}
+         )
+
+     # Then - should return 500 error with error message
+     assert exc_info.value.code == 500
+     payload = json.loads(exc_info.value.response.body)
+     assert payload["status"] == "error"
+     assert "message" in payload
+
+
+ async def test_completions_schema_parameter(jp_fetch):
+     """Test completions endpoint accepts schema parameter."""
+     # When - fetch completions with schema parameter
+     response = await jp_fetch(
+         "jl-db-comp", "completions", params={"schema": "public", "prefix": "test"}
+     )
+
+     # Then
+     assert response.code == 200
+     payload = json.loads(response.body)
+     assert payload["status"] == "success"
+     # Without a valid database, should return empty results
+     assert isinstance(payload["tables"], list)
+     assert isinstance(payload["columns"], list)
jl_db_comp-0.1.0.data/data/etc/jupyter/jupyter_server_config.d/jl_db_comp.json ADDED
@@ -0,0 +1,7 @@
+ {
+   "ServerApp": {
+     "jpserver_extensions": {
+       "jl_db_comp": true
+     }
+   }
+ }