jl-db-comp 0.1.0__tar.gz → 0.1.2__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (67)
  1. jl_db_comp-0.1.2/CHANGELOG.md +44 -0
  2. {jl_db_comp-0.1.0 → jl_db_comp-0.1.2}/PKG-INFO +103 -1
  3. {jl_db_comp-0.1.0 → jl_db_comp-0.1.2}/README.md +102 -0
  4. jl_db_comp-0.1.2/connections.ini +23 -0
  5. {jl_db_comp-0.1.0 → jl_db_comp-0.1.2}/jl_db_comp/_version.py +1 -1
  6. jl_db_comp-0.1.2/jl_db_comp/connections.py +173 -0
  7. {jl_db_comp-0.1.0 → jl_db_comp-0.1.2}/jl_db_comp/labextension/package.json +3 -2
  8. {jl_db_comp-0.1.0 → jl_db_comp-0.1.2}/jl_db_comp/labextension/schemas/jl_db_comp/package.json.orig +2 -1
  9. {jl_db_comp-0.1.0 → jl_db_comp-0.1.2}/jl_db_comp/labextension/schemas/jl_db_comp/plugin.json +4 -4
  10. jl_db_comp-0.1.2/jl_db_comp/labextension/static/171.d366980651e0db8d978c.js +1 -0
  11. jl_db_comp-0.1.2/jl_db_comp/labextension/static/728.6552504d5b9b27551bc5.js +1 -0
  12. jl_db_comp-0.1.2/jl_db_comp/labextension/static/remoteEntry.2f5032a1d7560953515d.js +1 -0
  13. jl_db_comp-0.1.2/jl_db_comp/labextension/static/third-party-licenses.json +16 -0
  14. jl_db_comp-0.1.2/jl_db_comp/routes.py +678 -0
  15. {jl_db_comp-0.1.0 → jl_db_comp-0.1.2}/jl_db_comp/tests/test_routes.py +1 -1
  16. {jl_db_comp-0.1.0 → jl_db_comp-0.1.2}/package.json +2 -1
  17. {jl_db_comp-0.1.0 → jl_db_comp-0.1.2}/pyproject.toml +1 -0
  18. {jl_db_comp-0.1.0 → jl_db_comp-0.1.2}/schema/plugin.json +4 -4
  19. {jl_db_comp-0.1.0 → jl_db_comp-0.1.2}/src/api.ts +59 -5
  20. {jl_db_comp-0.1.0 → jl_db_comp-0.1.2}/src/index.ts +9 -6
  21. {jl_db_comp-0.1.0 → jl_db_comp-0.1.2}/src/provider.ts +208 -6
  22. jl_db_comp-0.1.2/test_nb.ipynb +241 -0
  23. {jl_db_comp-0.1.0 → jl_db_comp-0.1.2}/yarn.lock +590 -0
  24. jl_db_comp-0.1.0/CHANGELOG.md +0 -19
  25. jl_db_comp-0.1.0/Untitled.ipynb +0 -36
  26. jl_db_comp-0.1.0/jl_db_comp/labextension/build_log.json +0 -728
  27. jl_db_comp-0.1.0/jl_db_comp/labextension/static/lib_index_js.a0969ed73da70f2cc451.js +0 -561
  28. jl_db_comp-0.1.0/jl_db_comp/labextension/static/lib_index_js.a0969ed73da70f2cc451.js.map +0 -1
  29. jl_db_comp-0.1.0/jl_db_comp/labextension/static/remoteEntry.5763ae02737e035e938c.js +0 -560
  30. jl_db_comp-0.1.0/jl_db_comp/labextension/static/remoteEntry.5763ae02737e035e938c.js.map +0 -1
  31. jl_db_comp-0.1.0/jl_db_comp/labextension/static/style_index_js.5364c7419a6b9db5d727.js +0 -508
  32. jl_db_comp-0.1.0/jl_db_comp/labextension/static/style_index_js.5364c7419a6b9db5d727.js.map +0 -1
  33. jl_db_comp-0.1.0/jl_db_comp/routes.py +0 -332
  34. {jl_db_comp-0.1.0 → jl_db_comp-0.1.2}/.copier-answers.yml +0 -0
  35. {jl_db_comp-0.1.0 → jl_db_comp-0.1.2}/.gitignore +0 -0
  36. {jl_db_comp-0.1.0 → jl_db_comp-0.1.2}/.prettierignore +0 -0
  37. {jl_db_comp-0.1.0 → jl_db_comp-0.1.2}/.yarnrc.yml +0 -0
  38. {jl_db_comp-0.1.0 → jl_db_comp-0.1.2}/AGENTS.md +0 -0
  39. {jl_db_comp-0.1.0 → jl_db_comp-0.1.2}/CLAUDE.md +0 -0
  40. {jl_db_comp-0.1.0 → jl_db_comp-0.1.2}/GEMINI.md +0 -0
  41. {jl_db_comp-0.1.0 → jl_db_comp-0.1.2}/IMPLEMENTATION.md +0 -0
  42. {jl_db_comp-0.1.0 → jl_db_comp-0.1.2}/LICENSE +0 -0
  43. {jl_db_comp-0.1.0 → jl_db_comp-0.1.2}/QUICKSTART.md +0 -0
  44. {jl_db_comp-0.1.0 → jl_db_comp-0.1.2}/RELEASE.md +0 -0
  45. {jl_db_comp-0.1.0 → jl_db_comp-0.1.2}/TESTING.md +0 -0
  46. {jl_db_comp-0.1.0 → jl_db_comp-0.1.2}/babel.config.js +0 -0
  47. {jl_db_comp-0.1.0 → jl_db_comp-0.1.2}/conftest.py +0 -0
  48. {jl_db_comp-0.1.0 → jl_db_comp-0.1.2}/install.json +0 -0
  49. {jl_db_comp-0.1.0 → jl_db_comp-0.1.2}/jest.config.js +0 -0
  50. {jl_db_comp-0.1.0 → jl_db_comp-0.1.2}/jl_db_comp/__init__.py +0 -0
  51. {jl_db_comp-0.1.0 → jl_db_comp-0.1.2}/jl_db_comp/labextension/static/style.js +0 -0
  52. {jl_db_comp-0.1.0 → jl_db_comp-0.1.2}/jl_db_comp/tests/__init__.py +0 -0
  53. {jl_db_comp-0.1.0 → jl_db_comp-0.1.2}/jupyter-config/server-config/jl_db_comp.json +0 -0
  54. {jl_db_comp-0.1.0 → jl_db_comp-0.1.2}/setup.py +0 -0
  55. {jl_db_comp-0.1.0 → jl_db_comp-0.1.2}/src/__tests__/jl_db_comp.spec.ts +0 -0
  56. {jl_db_comp-0.1.0 → jl_db_comp-0.1.2}/src/request.ts +0 -0
  57. {jl_db_comp-0.1.0 → jl_db_comp-0.1.2}/style/base.css +0 -0
  58. {jl_db_comp-0.1.0 → jl_db_comp-0.1.2}/style/index.css +0 -0
  59. {jl_db_comp-0.1.0 → jl_db_comp-0.1.2}/style/index.js +0 -0
  60. {jl_db_comp-0.1.0 → jl_db_comp-0.1.2}/tsconfig.json +0 -0
  61. {jl_db_comp-0.1.0 → jl_db_comp-0.1.2}/tsconfig.test.json +0 -0
  62. {jl_db_comp-0.1.0 → jl_db_comp-0.1.2}/ui-tests/README.md +0 -0
  63. {jl_db_comp-0.1.0 → jl_db_comp-0.1.2}/ui-tests/jupyter_server_test_config.py +0 -0
  64. {jl_db_comp-0.1.0 → jl_db_comp-0.1.2}/ui-tests/package.json +0 -0
  65. {jl_db_comp-0.1.0 → jl_db_comp-0.1.2}/ui-tests/playwright.config.js +0 -0
  66. {jl_db_comp-0.1.0 → jl_db_comp-0.1.2}/ui-tests/tests/jl_db_comp.spec.ts +0 -0
  67. {jl_db_comp-0.1.0 → jl_db_comp-0.1.2}/ui-tests/yarn.lock +0 -0
@@ -0,0 +1,44 @@
+ # Changelog
+
+ <!-- <START NEW CHANGELOG ENTRY> -->
+
+ ## 0.1.2
+
+ No merged PRs
+
+ <!-- <END NEW CHANGELOG ENTRY> -->
+
+ ## 0.1.1
+
+ ([Full Changelog](https://github.com/Ben-Herz/jl_db_completer/compare/v0.1.0...e66c6da581d00d9d3a90ca8a494e56a1af5e8536))
+
+ ### Enhancements made
+
+ - get connection to db from jupysql connections.ini file instead of env [#2](https://github.com/Ben-Herz/jl_db_completer/pull/2) ([@Ben-Herz](https://github.com/Ben-Herz))
+
+ ### Other merged PRs
+
+ - Add JSONB diagnostics logging and troubleshooting endpoint [#1](https://github.com/Ben-Herz/jl_db_completer/pull/1) ([@Ben-Herz](https://github.com/Ben-Herz), [@claude](https://github.com/claude))
+
+ ### Contributors to this release
+
+ The following people contributed discussions, new ideas, code and documentation contributions, and review.
+ See [our definition of contributors](https://github-activity.readthedocs.io/en/latest/#how-does-this-tool-define-contributions-in-the-reports).
+
+ ([GitHub contributors page for this release](https://github.com/Ben-Herz/jl_db_completer/graphs/contributors?from=2025-12-30&to=2026-01-16&type=c))
+
+ @Ben-Herz ([activity](https://github.com/search?q=repo%3ABen-Herz%2Fjl_db_completer+involves%3ABen-Herz+updated%3A2025-12-30..2026-01-16&type=Issues)) | @claude ([activity](https://github.com/search?q=repo%3ABen-Herz%2Fjl_db_completer+involves%3Aclaude+updated%3A2025-12-30..2026-01-16&type=Issues))
+
+ ## 0.1.0 (2024-12-30)
+
+ Initial release of jl_db_comp - PostgreSQL autocomplete for JupyterLab.
+
+ ### Features
+
+ - PostgreSQL table and column name autocompletion
+ - Schema-aware completion (supports multiple schemas)
+ - JSONB key completion with nested path navigation
+ - Automatic FROM clause parsing to suggest columns from referenced tables
+ - Client-side caching with 5-minute TTL
+ - Configurable database connection via environment variable or JupyterLab settings
+ - Smart SQL keyword detection (SELECT, FROM, JOIN, WHERE, etc.)
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: jl_db_comp
- Version: 0.1.0
+ Version: 0.1.2
  Summary: A JupyterLab extension to complete db queries in jupyterlab notebooks
  Project-URL: Homepage, https://github.com/Ben-Herz/jl_db_completer
  Project-URL: Bug Tracker, https://github.com/Ben-Herz/jl_db_completer/issues
@@ -300,6 +300,22 @@ the frontend extension, check the frontend extension is installed:
  jupyter labextension list
  ```

+ ### JSONB Autocompletion Not Working
+
+ If JSONB key completion works on one database but not another, check the server logs (terminal running `jupyter lab`) for diagnostic messages when triggering completion.
+
+ You can also call the diagnostics endpoint directly:
+
+ ```
+ GET /jl-db-comp/jsonb-diagnostics?table=TABLE_NAME&column=COLUMN_NAME
+ ```
+
+ This returns the JSONB type distribution and a recommendation. Common causes:
+
+ - **All NULL values** - The column has no data
+ - **Arrays instead of objects** - Keys can only be extracted from `{}` objects, not `[]` arrays
+ - **Wrong schema** - Add `&schema=your_schema` if not using `public`
+
  ## Contributing

  ### Development install
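
The diagnostics endpoint described above can also be called from a notebook or script instead of a raw `GET`. A minimal sketch, assuming a local server at `http://localhost:8888`, a server token exported as `JUPYTER_TOKEN`, and placeholder table/column names (all of these are assumptions to adapt to your own setup):

```python
# Sketch only: query the jsonb-diagnostics endpoint documented above.
# The base URL, JUPYTER_TOKEN variable, and table/column names are
# placeholders -- adjust them to your own setup.
import os
import requests

BASE_URL = "http://localhost:8888"
TOKEN = os.environ.get("JUPYTER_TOKEN", "")

resp = requests.get(
    f"{BASE_URL}/jl-db-comp/jsonb-diagnostics",
    params={"table": "patients", "column": "attributes", "schema": "public"},
    headers={"Authorization": f"token {TOKEN}"},
)
resp.raise_for_status()
print(resp.json())  # JSONB type distribution plus a recommendation
```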
@@ -359,6 +375,92 @@ In development mode, you will also need to remove the symlink created by `jupyte
  command. To find its location, you can run `jupyter labextension list` to figure out where the `labextensions`
  folder is located. Then you can remove the symlink named `jl_db_comp` within that folder.

+ ### Contributing Features and Fixes
+
+ This project uses a **pull request workflow** for all changes. This enables automated changelog generation and streamlined releases.
+
+ #### Development Workflow
+
+ **1. Create a feature branch:**
+
+ ```bash
+ # For new features
+ git checkout -b feature/descriptive-name
+
+ # For bug fixes
+ git checkout -b fix/bug-description
+
+ # For documentation updates
+ git checkout -b docs/what-you-changed
+ ```
+
+ **2. Make your changes:**
+
+ - Write code following the guidelines in `AGENTS.md` / `CLAUDE.md`
+ - Add tests for new functionality
+ - Update documentation as needed
+ - Run tests locally: `pytest -vv -r ap --cov jl_db_comp`
+ - Format code: `jlpm run lint`
+
+ **3. Commit and push:**
+
+ ```bash
+ git add .
+ git commit -m "Brief description of changes"
+ git push origin feature/descriptive-name
+ ```
+
+ **4. Create a Pull Request:**
+
+ - Go to https://github.com/Ben-Herz/jl_db_completer/pulls
+ - Click "New pull request"
+ - Select your feature branch
+ - **Write a clear PR title** - this becomes the changelog entry!
+   - ✅ Good: "Add support for MySQL databases"
+   - ✅ Good: "Fix JSONB key completion for nested objects"
+   - ❌ Bad: "updates", "fix bug", "changes"
+ - Add a description explaining what changed and why
+ - **Add a label** (REQUIRED for CI to pass):
+   - `enhancement` - New features
+   - `bug` - Bug fixes
+   - `documentation` - Documentation updates
+   - `maintenance` - Maintenance tasks
+
+ > **Note:** At least one label is required. The "Enforce PR label" CI check will fail if no label is set. You can add labels when creating the PR or afterwards from the PR sidebar.
+
+ - Click "Create pull request"
+
+ **5. Review and merge:**
+
+ - CI checks must pass (build, tests, linting)
+ - Address any review feedback
+ - Once approved, merge the PR
+ - Delete the feature branch after merging
+
+ #### Automated Releases
+
+ Releases are created using GitHub Actions workflows:
+
+ **Step 1: Prepare Release** (Manual workflow)
+
+ - Go to **Actions** → **"Step 1: Prep Release"** → **"Run workflow"**
+ - Optionally specify version (e.g., `0.2.0`, `patch`, `minor`, `major`)
+ - The workflow will:
+   - Bump the version
+   - Generate changelog from merged PRs since last release
+   - Create a draft GitHub release
+
+ **Step 2: Publish Release** (Manual workflow)
+
+ - Review the draft release
+ - Go to **Actions** → **"Step 2: Publish Release"** → **"Run workflow"**
+ - The workflow will:
+   - Publish to PyPI
+   - Publish to npm
+   - Make the GitHub release public
+
+ **Note:** Automated releases require repository secrets for PyPI and npm publishing (e.g., `NPM_TOKEN`) as well as GitHub App credentials. See [RELEASE.md](RELEASE.md) for setup details.
+
  ### Testing the extension

  #### Server tests
@@ -235,6 +235,22 @@ the frontend extension, check the frontend extension is installed:
  jupyter labextension list
  ```

+ ### JSONB Autocompletion Not Working
+
+ If JSONB key completion works on one database but not another, check the server logs (terminal running `jupyter lab`) for diagnostic messages when triggering completion.
+
+ You can also call the diagnostics endpoint directly:
+
+ ```
+ GET /jl-db-comp/jsonb-diagnostics?table=TABLE_NAME&column=COLUMN_NAME
+ ```
+
+ This returns the JSONB type distribution and a recommendation. Common causes:
+
+ - **All NULL values** - The column has no data
+ - **Arrays instead of objects** - Keys can only be extracted from `{}` objects, not `[]` arrays
+ - **Wrong schema** - Add `&schema=your_schema` if not using `public`
+
  ## Contributing

  ### Development install
@@ -294,6 +310,92 @@ In development mode, you will also need to remove the symlink created by `jupyte
  command. To find its location, you can run `jupyter labextension list` to figure out where the `labextensions`
  folder is located. Then you can remove the symlink named `jl_db_comp` within that folder.

+ ### Contributing Features and Fixes
+
+ This project uses a **pull request workflow** for all changes. This enables automated changelog generation and streamlined releases.
+
+ #### Development Workflow
+
+ **1. Create a feature branch:**
+
+ ```bash
+ # For new features
+ git checkout -b feature/descriptive-name
+
+ # For bug fixes
+ git checkout -b fix/bug-description
+
+ # For documentation updates
+ git checkout -b docs/what-you-changed
+ ```
+
+ **2. Make your changes:**
+
+ - Write code following the guidelines in `AGENTS.md` / `CLAUDE.md`
+ - Add tests for new functionality
+ - Update documentation as needed
+ - Run tests locally: `pytest -vv -r ap --cov jl_db_comp`
+ - Format code: `jlpm run lint`
+
+ **3. Commit and push:**
+
+ ```bash
+ git add .
+ git commit -m "Brief description of changes"
+ git push origin feature/descriptive-name
+ ```
+
+ **4. Create a Pull Request:**
+
+ - Go to https://github.com/Ben-Herz/jl_db_completer/pulls
+ - Click "New pull request"
+ - Select your feature branch
+ - **Write a clear PR title** - this becomes the changelog entry!
+   - ✅ Good: "Add support for MySQL databases"
+   - ✅ Good: "Fix JSONB key completion for nested objects"
+   - ❌ Bad: "updates", "fix bug", "changes"
+ - Add a description explaining what changed and why
+ - **Add a label** (REQUIRED for CI to pass):
+   - `enhancement` - New features
+   - `bug` - Bug fixes
+   - `documentation` - Documentation updates
+   - `maintenance` - Maintenance tasks
+
+ > **Note:** At least one label is required. The "Enforce PR label" CI check will fail if no label is set. You can add labels when creating the PR or afterwards from the PR sidebar.
+
+ - Click "Create pull request"
+
+ **5. Review and merge:**
+
+ - CI checks must pass (build, tests, linting)
+ - Address any review feedback
+ - Once approved, merge the PR
+ - Delete the feature branch after merging
+
+ #### Automated Releases
+
+ Releases are created using GitHub Actions workflows:
+
+ **Step 1: Prepare Release** (Manual workflow)
+
+ - Go to **Actions** → **"Step 1: Prep Release"** → **"Run workflow"**
+ - Optionally specify version (e.g., `0.2.0`, `patch`, `minor`, `major`)
+ - The workflow will:
+   - Bump the version
+   - Generate changelog from merged PRs since last release
+   - Create a draft GitHub release
+
+ **Step 2: Publish Release** (Manual workflow)
+
+ - Review the draft release
+ - Go to **Actions** → **"Step 2: Publish Release"** → **"Run workflow"**
+ - The workflow will:
+   - Publish to PyPI
+   - Publish to npm
+   - Make the GitHub release public
+
+ **Note:** Automated releases require repository secrets for PyPI and npm publishing (e.g., `NPM_TOKEN`) as well as GitHub App credentials. See [RELEASE.md](RELEASE.md) for setup details.
+
  ### Testing the extension

  #### Server tests
@@ -0,0 +1,23 @@
+ [pg]
+ drivername = postgresql
+ username = postgres
+ password = example
+ host = localhost
+ port = 5432
+ database = ehrexample
+
+ [mssql]
+ drivername = mssql+pymssql
+ username = sa
+ password = Example123!
+ host = localhost
+ port = 1433
+ database = ehrexample
+
+ [mysql]
+ drivername = mysql+pymysql
+ username = root
+ password = example
+ host = localhost
+ port = 3306
+ database = ehrexample
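
The bundled `connections.ini` above follows jupysql's connections-file format, which is what the 0.1.2 completer now reads. A rough notebook sketch of how such a section is typically used; the `%sql --section` flag and `SqlMagic.dsn_filename` option are jupysql features rather than part of this extension, and the sketch assumes jupysql is installed and the `[pg]` database is reachable:

```python
# Notebook sketch, assuming jupysql is installed and the [pg] section
# above points at a reachable PostgreSQL instance.
%load_ext sql
%config SqlMagic.dsn_filename = "connections.ini"  # point jupysql at this file
%sql --section pg                                   # connect using the [pg] section
%sql SELECT table_name FROM information_schema.tables LIMIT 5
```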
@@ -1,4 +1,4 @@
  # This file is auto-generated by Hatchling. As such, do not:
  # - modify
  # - track in version control e.g. be sure to add to .gitignore
- __version__ = VERSION = '0.1.0'
+ __version__ = VERSION = '0.1.2'
@@ -0,0 +1,173 @@
+ """
+ Connection management module for jupysql connections.ini integration.
+
+ This module handles reading database connection configurations from jupysql's
+ connections.ini file format and building connection URLs from them.
+ """
+
+ import configparser
+ import os
+ from pathlib import Path
+ from typing import Optional
+ from urllib.parse import quote_plus
+
+
+ # Default locations to search for connections.ini
+ DEFAULT_CONNECTIONS_PATHS = [
+     Path.home() / '.jupysql' / 'connections.ini',  # jupysql default
+     Path.cwd() / 'connections.ini',  # Current working directory
+ ]
+
+
+ def find_connections_file(custom_path: Optional[str] = None) -> Optional[Path]:
+     """Find the connections.ini file.
+
+     Args:
+         custom_path: Optional custom path to connections.ini file
+
+     Returns:
+         Path to the connections.ini file, or None if not found
+     """
+     # Check custom path first
+     if custom_path:
+         custom = Path(custom_path)
+         if custom.exists():
+             return custom
+
+     # Check default locations
+     for path in DEFAULT_CONNECTIONS_PATHS:
+         if path.exists():
+             return path
+
+     return None
+
+
+ def parse_connections_file(file_path: Path) -> dict[str, dict[str, str]]:
+     """Parse connections.ini file into a dictionary.
+
+     Args:
+         file_path: Path to the connections.ini file
+
+     Returns:
+         Dictionary mapping connection names to their configuration
+     """
+     config = configparser.ConfigParser()
+     config.read(file_path)
+
+     connections = {}
+     for section in config.sections():
+         connections[section] = dict(config[section])
+
+     return connections
+
+
+ def build_connection_url(connection_config: dict[str, str]) -> str:
+     """Build a database URL from connection configuration.
+
+     Args:
+         connection_config: Dictionary with connection parameters:
+             - drivername: database backend (e.g., 'postgresql')
+             - username: database user
+             - password: database password
+             - host: database host
+             - port: database port
+             - database: database name
+             - query: optional query parameters (as string or dict)
+
+     Returns:
+         Connection URL string
+     """
+     drivername = connection_config.get('drivername', 'postgresql')
+     username = connection_config.get('username', '')
+     password = connection_config.get('password', '')
+     host = connection_config.get('host', 'localhost')
+     port = connection_config.get('port', '5432')
+     database = connection_config.get('database', '')
+
+     # URL encode username and password for special characters
+     if username:
+         username = quote_plus(username)
+     if password:
+         password = quote_plus(password)
+
+     # Build the URL
+     if username and password:
+         auth = f'{username}:{password}@'
+     elif username:
+         auth = f'{username}@'
+     else:
+         auth = ''
+
+     url = f'{drivername}://{auth}{host}:{port}/{database}'
+
+     # Handle query parameters if present
+     query = connection_config.get('query', '')
+     if query:
+         # Query could be a string representation of a dict or a plain string
+         if isinstance(query, str) and query.startswith('{'):
+             # Try to parse as dict-like string
+             try:
+                 import ast
+                 query_dict = ast.literal_eval(query)
+                 if isinstance(query_dict, dict):
+                     query_str = '&'.join(
+                         f'{k}={v}' for k, v in query_dict.items()
+                     )
+                     url = f'{url}?{query_str}'
+             except (ValueError, SyntaxError):
+                 pass
+         elif isinstance(query, str) and query:
+             url = f'{url}?{query}'
+
+     return url
+
+
+ def get_connection_url(
+     connection_name: str,
+     connections_file_path: Optional[str] = None
+ ) -> Optional[str]:
+     """Get a connection URL by name from connections.ini.
+
+     Args:
+         connection_name: Name of the connection (section name in ini file)
+         connections_file_path: Optional custom path to connections.ini
+
+     Returns:
+         Connection URL string, or None if connection not found
+     """
+     file_path = find_connections_file(connections_file_path)
+     if not file_path:
+         return None
+
+     connections = parse_connections_file(file_path)
+     if connection_name not in connections:
+         return None
+
+     return build_connection_url(connections[connection_name])
+
+
+ def list_connections(
+     connections_file_path: Optional[str] = None
+ ) -> dict[str, dict]:
+     """List all available connections from connections.ini.
+
+     Args:
+         connections_file_path: Optional custom path to connections.ini
+
+     Returns:
+         Dictionary mapping connection names to their config (without password)
+     """
+     file_path = find_connections_file(connections_file_path)
+     if not file_path:
+         return {}
+
+     connections = parse_connections_file(file_path)
+
+     # Return connections without exposing passwords
+     safe_connections = {}
+     for name, config in connections.items():
+         safe_config = {k: v for k, v in config.items() if k != 'password'}
+         safe_config['has_password'] = 'password' in config
+         safe_connections[name] = safe_config
+
+     return safe_connections
1
1
  {
2
2
  "name": "jl_db_comp",
3
- "version": "0.1.0",
3
+ "version": "0.1.2",
4
4
  "description": "A JupyterLab extension to complete db queries in jupyterlab notebooks",
5
5
  "keywords": [
6
6
  "jupyter",
@@ -60,6 +60,7 @@
    "@jupyterlab/application": "^4.0.0",
    "@jupyterlab/completer": "^4.0.0",
    "@jupyterlab/coreutils": "^6.0.0",
+   "@jupyterlab/notebook": "^4.0.0",
    "@jupyterlab/services": "^7.0.0",
    "@jupyterlab/settingregistry": "^4.0.0"
  },
@@ -117,7 +118,7 @@
    "outputDir": "jl_db_comp/labextension",
    "schemaDir": "schema",
    "_build": {
-     "load": "static/remoteEntry.5763ae02737e035e938c.js",
+     "load": "static/remoteEntry.2f5032a1d7560953515d.js",
      "extension": "./extension",
      "style": "./style"
    }
@@ -1,6 +1,6 @@
  {
    "name": "jl_db_comp",
-   "version": "0.1.0",
+   "version": "0.1.2",
    "description": "A JupyterLab extension to complete db queries in jupyterlab notebooks",
    "keywords": [
      "jupyter",
@@ -60,6 +60,7 @@
    "@jupyterlab/application": "^4.0.0",
    "@jupyterlab/completer": "^4.0.0",
    "@jupyterlab/coreutils": "^6.0.0",
+   "@jupyterlab/notebook": "^4.0.0",
    "@jupyterlab/services": "^7.0.0",
    "@jupyterlab/settingregistry": "^4.0.0"
  },
@@ -1,7 +1,7 @@
  {
    "jupyter.lab.shortcuts": [],
    "title": "PostgreSQL Database Completer",
-   "description": "Settings for PostgreSQL database autocomplete in JupyterLab.",
+   "description": "Settings for PostgreSQL database autocomplete in JupyterLab. Uses jupysql connections.ini for database configuration.",
    "type": "object",
    "properties": {
      "enabled": {
@@ -10,10 +10,10 @@
        "description": "Enable or disable PostgreSQL table and column completions",
        "default": true
      },
-     "databaseUrl": {
+     "connectionName": {
        "type": "string",
-       "title": "Database URL",
-       "description": "PostgreSQL connection string (e.g., postgresql://user:password@host:port/dbname). Leave empty to use POSTGRES_URL environment variable.",
+       "title": "Connection Name",
+       "description": "Connection name from connections.ini file. Leave empty to use the active jupysql connection from the notebook kernel, or the first available connection.",
        "default": ""
      },
      "schema": {
@@ -0,0 +1 @@
+ "use strict";(self.webpackChunkjl_db_comp=self.webpackChunkjl_db_comp||[]).push([[171],{171(e,n,t){t.r(n),t.d(n,{default:()=>m});var o=t(206),s=t(428),i=t(490),a=t(830),r=t(469);async function c(e="",n={}){const t=a.ServerConnection.makeSettings(),o=r.URLExt.join(t.baseUrl,"jl-db-comp",e);let s;try{s=await a.ServerConnection.makeRequest(o,n,t)}catch(e){throw new a.ServerConnection.NetworkError(e)}let i=await s.text();if(i.length>0)try{i=JSON.parse(i)}catch(e){console.log("Not a JSON response body.",s)}if(!s.ok)throw new a.ServerConnection.ResponseError(s,i.message||i);return i}class l{constructor(e,n){this.identifier="jl_db_comp:postgres-completer",this.renderer=null,this._cache=new Map,this._cacheTTL=3e5,this._settings=null,this._notebookTracker=null,this._connectionName="",this._schema="public",this._enabled=!0,this._availableConnections=[],this._cachedKernelConfig=null,this._kernelConfigCacheTime=0,this._kernelConfigCacheTTL=3e4,this._sqlKeywords=["select","from","join","where","insert","update","delete","inner","left","right","outer","on","group","order","by","having","into","values","set"],this._notebookTracker=n||null,e&&(this._settings=e,this._loadSettings(),e.changed.connect(()=>{this._loadSettings()})),this._loadAvailableConnections()}_loadSettings(){this._settings&&(this._connectionName=this._settings.get("connectionName").composite,this._schema=this._settings.get("schema").composite,this._enabled=this._settings.get("enabled").composite)}async _loadAvailableConnections(){try{const e=await async function(){try{return await c("connections",{method:"GET"})}catch(e){if(e instanceof a.ServerConnection.ResponseError)console.error(`Failed to fetch connections: ${e.message}`);else{const n=e instanceof Error?e.message:"Unknown error";console.error(`Failed to fetch connections: ${n}`)}return{status:"error",connections:{},filePath:null,message:"Failed to fetch connections"}}}();"success"===e.status&&(this._availableConnections=Object.keys(e.connections))}catch(e){console.warn("Failed to load available connections:",e)}}async _getKernelConfig(){var e;const n=Date.now();if(this._cachedKernelConfig&&n-this._kernelConfigCacheTime<this._kernelConfigCacheTTL)return this._cachedKernelConfig;if(!this._notebookTracker)return null;const t=this._notebookTracker.currentWidget;if(!t)return null;const o=null===(e=t.sessionContext.session)||void 0===e?void 0:e.kernel;if(!o)return null;try{const e=o.requestExecute({code:"\nimport json\nimport os\nresult = {'connection': '', 'dsn_filename': ''}\n\n# Get active connection\ntry:\n from sql.connection import ConnectionManager\n conn = ConnectionManager.current\n if conn:\n for alias, c in ConnectionManager.connections.items():\n if c is conn:\n result['connection'] = alias\n break\nexcept:\n pass\n\n# Get dsn_filename from SqlMagic instance\ndsn_filename = None\ntry:\n from sql.magic import SqlMagic\n ip = get_ipython()\n if ip:\n for name, inst in ip.magics_manager.registry.items():\n if isinstance(inst, SqlMagic):\n dsn_filename = inst.dsn_filename\n break\nexcept:\n pass\n\n# Fallback: try to get from config\nif not dsn_filename:\n try:\n ip = get_ipython()\n if ip and hasattr(ip, 'config'):\n sql_config = ip.config.get('SqlMagic', {})\n if 'dsn_filename' in sql_config:\n dsn_filename = sql_config['dsn_filename']\n except:\n pass\n\n# Convert to absolute path if we have a dsn_filename\nif dsn_filename:\n if not os.path.isabs(dsn_filename):\n # Resolve relative to current working directory\n dsn_filename = os.path.abspath(dsn_filename)\n 
result['dsn_filename'] = dsn_filename\n\nprint(json.dumps(result))\n",silent:!0,store_history:!1}),t=await new Promise(n=>{let t="";e.onIOPub=e=>{if("stream"===e.header.msg_type){const n=e.content;"stdout"===n.name&&(t+=n.text)}},e.done.then(()=>{try{const e=JSON.parse(t.trim());n({connection:e.connection||"",dsnFilename:e.dsn_filename||""})}catch(e){n(null)}}).catch(()=>{n(null)})});return t&&(this._cachedKernelConfig=t,this._kernelConfigCacheTime=n),t}catch(e){return console.warn("Failed to get jupysql config from kernel:",e),null}}async isApplicable(e){if(!this._enabled)return!1;const n=e.editor;if(!n)return!1;const t=n.model.sharedModel.getSource();if(!t)return!1;const o=t.toLowerCase();return this._sqlKeywords.some(e=>o.includes(e))}async fetch(e,n){var t;if(!this._enabled)return{start:e.offset,end:e.offset,items:[]};const{text:o,offset:s}=e,i=this._extractContext(o,s);let r;if(i.jsonbColumn){const e=(null===(t=i.jsonbPath)||void 0===t?void 0:t.join("."))||"";r=`${i.schemaOrTable?`${i.schemaOrTable}.`:""}${i.jsonbColumn}->${e}.${i.prefix}`.toLowerCase()}else r=i.schema&&i.tableName?`${i.schema}.${i.tableName}.${i.prefix}`.toLowerCase():i.schemaOrTable?`${i.schemaOrTable}.${i.prefix}`.toLowerCase():i.prefix.toLowerCase();const l=this._getCached(r);if(l)return this._formatReply(l,e.offset,i.prefix);try{const n=await this._getKernelConfig();let t,o=this._connectionName;n&&(n.dsnFilename&&(t=n.dsnFilename),!o&&n.connection&&(o=n.connection)),!o&&this._availableConnections.length>0&&(o=this._availableConnections[0]);const s=await async function(e,n="",t="public",o,s,i,r,l){try{const a=new URLSearchParams;e&&a.append("connection",e),l&&a.append("connections_file",l),n&&a.append("prefix",n),a.append("schema",t),o&&a.append("table",o),s&&a.append("schema_or_table",s),i&&(a.append("jsonb_column",i),r&&r.length>0&&a.append("jsonb_path",JSON.stringify(r)));const h=`completions?${a.toString()}`,m=await c(h,{method:"GET"});return"error"===m.status?(console.error("PostgreSQL completion error:",m.message),[]):i&&m.jsonbKeys?m.jsonbKeys:o||s?m.columns.length>0?m.columns:m.tables:[...m.tables,...m.columns]}catch(e){if(e instanceof a.ServerConnection.ResponseError){const n=e.response.status;let t=e.message;"string"==typeof t&&(t.includes("<!DOCTYPE")||t.includes("<html"))&&(t=`HTML error page (${t.substring(0,100)}...)`),console.error(`PostgreSQL completions API failed (${n}): ${t}`)}else{const n=e instanceof Error?e.message:"Unknown error";console.error(`PostgreSQL completions API failed: ${n}`)}return[]}}(o||void 0,i.prefix,i.schema||this._schema,i.tableName,i.schemaOrTable,i.jsonbColumn,i.jsonbPath,t);return this._cache.set(r,{items:s,timestamp:Date.now()}),this._formatReply(s,e.offset,i.prefix)}catch(n){return console.error("Failed to fetch PostgreSQL completions:",n),{start:e.offset,end:e.offset,items:[]}}}_extractContext(e,n){const t=e.substring(0,n);if(t.includes("->")){const e=t.match(/([\w]+\.)?([\w]+)\s*->\s*(.*)$/);if(e){const n=e[1]?e[1].slice(0,-1):void 0,t=e[2],o=e[3],s=[],i=/['"]?([\w]+)['"]?\s*->/g;let a;for(;null!==(a=i.exec(o));)s.push(a[1]);const r=o.lastIndexOf("->");let c="";return c=r>=0?o.substring(r+2).trim().replace(/['"]/g,""):o.trim().replace(/['"]/g,""),{schemaOrTable:n,jsonbColumn:t,jsonbPath:s,prefix:c}}}const o=t.match(/([\w]+)\.([\w]+)\.([\w]*)$/);if(o)return{schema:o[1],tableName:o[2],prefix:o[3]};const s=t.match(/([\w]+)\.([\w]*)$/);if(s)return{schemaOrTable:s[1],prefix:s[2]};const 
i=t.match(/[\w]+$/),a=i?i[0]:"",r=e.toLowerCase().match(/\bfrom\s+([\w]+\.)?[\w]+/);if(r){const e=r[0].match(/\bfrom\s+(?:([\w]+)\.)?([\w]+)/);if(e){const n=e[1],t=e[2];return n?{schema:n,tableName:t,prefix:a}:{schemaOrTable:t,prefix:a}}}return{prefix:a}}_getCached(e){const n=e.toLowerCase(),t=this._cache.get(n);return t?Date.now()-t.timestamp>this._cacheTTL?(this._cache.delete(n),null):t.items:null}_formatReply(e,n,t){return{start:n-t.length,end:n,items:e.map(e=>{let n=e.name,t=e.name;"jsonb_key"===e.type&&(t=`'${e.name}'`),"column"===e.type&&e.table&&(n=`${e.name} (${e.table})`);let o,s="📊",i=e.name;return"table"===e.type?s="📋":"view"===e.type?s="👁️":"jsonb_key"===e.type&&(s="🔑",i=`0000${e.name}`),"column"===e.type&&e.dataType&&e.table?o=`${e.table}.${e.name}: ${e.dataType}`:"jsonb_key"===e.type&&e.keyPath&&(o=`JSONB key: ${e.keyPath.join(" -> ")}`),{label:`${s} ${n}`,insertText:t,sortText:i,type:e.type,documentation:o}})}}clearCache(){this._cache.clear()}}const h="jl_db_comp:plugin",m={id:h,description:"A JupyterLab extension to complete db queries in jupyterlab notebooks",autoStart:!0,requires:[o.ICompletionProviderManager],optional:[i.ISettingRegistry,s.INotebookTracker],activate:(e,n,t,o)=>{let s;t?t.load(h).then(e=>{s=new l(e,o),n.registerProvider(s),console.log("JupyterLab extension jl_db_comp is activated!")}).catch(e=>{console.error("Failed to load settings for jl_db_comp:",e),s=new l(null,o),n.registerProvider(s),console.log("JupyterLab extension jl_db_comp is activated!")}):(s=new l(null,o),n.registerProvider(s),console.log("JupyterLab extension jl_db_comp is activated!"))}}}}]);
@@ -0,0 +1 @@
+ "use strict";(self.webpackChunkjl_db_comp=self.webpackChunkjl_db_comp||[]).push([[728],{56(e,n,t){e.exports=function(e){var n=t.nc;n&&e.setAttribute("nonce",n)}},72(e){var n=[];function t(e){for(var t=-1,r=0;r<n.length;r++)if(n[r].identifier===e){t=r;break}return t}function r(e,r){for(var a={},c=[],i=0;i<e.length;i++){var s=e[i],l=r.base?s[0]+r.base:s[0],u=a[l]||0,p="".concat(l," ").concat(u);a[l]=u+1;var f=t(p),d={css:s[1],media:s[2],sourceMap:s[3],supports:s[4],layer:s[5]};if(-1!==f)n[f].references++,n[f].updater(d);else{var m=o(d,r);r.byIndex=i,n.splice(i,0,{identifier:p,updater:m,references:1})}c.push(p)}return c}function o(e,n){var t=n.domAPI(n);return t.update(e),function(n){if(n){if(n.css===e.css&&n.media===e.media&&n.sourceMap===e.sourceMap&&n.supports===e.supports&&n.layer===e.layer)return;t.update(e=n)}else t.remove()}}e.exports=function(e,o){var a=r(e=e||[],o=o||{});return function(e){e=e||[];for(var c=0;c<a.length;c++){var i=t(a[c]);n[i].references--}for(var s=r(e,o),l=0;l<a.length;l++){var u=t(a[l]);0===n[u].references&&(n[u].updater(),n.splice(u,1))}a=s}}},113(e){e.exports=function(e,n){if(n.styleSheet)n.styleSheet.cssText=e;else{for(;n.firstChild;)n.removeChild(n.firstChild);n.appendChild(document.createTextNode(e))}}},314(e){e.exports=function(e){var n=[];return n.toString=function(){return this.map(function(n){var t="",r=void 0!==n[5];return n[4]&&(t+="@supports (".concat(n[4],") {")),n[2]&&(t+="@media ".concat(n[2]," {")),r&&(t+="@layer".concat(n[5].length>0?" ".concat(n[5]):""," {")),t+=e(n),r&&(t+="}"),n[2]&&(t+="}"),n[4]&&(t+="}"),t}).join("")},n.i=function(e,t,r,o,a){"string"==typeof e&&(e=[[null,e,void 0]]);var c={};if(r)for(var i=0;i<this.length;i++){var s=this[i][0];null!=s&&(c[s]=!0)}for(var l=0;l<e.length;l++){var u=[].concat(e[l]);r&&c[u[0]]||(void 0!==a&&(void 0===u[5]||(u[1]="@layer".concat(u[5].length>0?" ".concat(u[5]):""," {").concat(u[1],"}")),u[5]=a),t&&(u[2]?(u[1]="@media ".concat(u[2]," {").concat(u[1],"}"),u[2]=t):u[2]=t),o&&(u[4]?(u[1]="@supports (".concat(u[4],") {").concat(u[1],"}"),u[4]=o):u[4]="".concat(o)),n.push(u))}},n}},475(e,n,t){t.d(n,{A:()=>i});var r=t(601),o=t.n(r),a=t(314),c=t.n(a)()(o());c.push([e.id,"/*\n See the JupyterLab Developer Guide for useful CSS Patterns:\n\n https://jupyterlab.readthedocs.io/en/stable/developer/css.html\n*/\n\n/* PostgreSQL completion item styling */\n.jp-jl-db-comp-completion-item {\n font-family: var(--jp-code-font-family);\n}\n\n.jp-jl-db-comp-completion-table {\n color: var(--jp-ui-font-color1);\n}\n\n.jp-jl-db-comp-completion-column {\n color: var(--jp-brand-color1);\n}\n\n.jp-jl-db-comp-completion-documentation {\n font-size: var(--jp-code-font-size-small);\n color: var(--jp-ui-font-color2);\n font-style: italic;\n}\n",""]);const i=c},540(e){e.exports=function(e){var n=document.createElement("style");return e.setAttributes(n,e.attributes),e.insert(n,e.options),n}},601(e){e.exports=function(e){return e[1]}},659(e){var n={};e.exports=function(e,t){var r=function(e){if(void 0===n[e]){var t=document.querySelector(e);if(window.HTMLIFrameElement&&t instanceof window.HTMLIFrameElement)try{t=t.contentDocument.head}catch(e){t=null}n[e]=t}return n[e]}(e);if(!r)throw new Error("Couldn't find a style target. 
This probably means that the value for the 'insert' parameter is invalid.");r.appendChild(t)}},728(e,n,t){var r=t(72),o=t.n(r),a=t(825),c=t.n(a),i=t(659),s=t.n(i),l=t(56),u=t.n(l),p=t(540),f=t.n(p),d=t(113),m=t.n(d),v=t(475),h={};h.styleTagTransform=m(),h.setAttributes=u(),h.insert=s().bind(null,"head"),h.domAPI=c(),h.insertStyleElement=f(),o()(v.A,h),v.A&&v.A.locals&&v.A.locals},825(e){e.exports=function(e){if("undefined"==typeof document)return{update:function(){},remove:function(){}};var n=e.insertStyleElement(e);return{update:function(t){!function(e,n,t){var r="";t.supports&&(r+="@supports (".concat(t.supports,") {")),t.media&&(r+="@media ".concat(t.media," {"));var o=void 0!==t.layer;o&&(r+="@layer".concat(t.layer.length>0?" ".concat(t.layer):""," {")),r+=t.css,o&&(r+="}"),t.media&&(r+="}"),t.supports&&(r+="}");var a=t.sourceMap;a&&"undefined"!=typeof btoa&&(r+="\n/*# sourceMappingURL=data:application/json;base64,".concat(btoa(unescape(encodeURIComponent(JSON.stringify(a))))," */")),n.styleTagTransform(r,e,n.options)}(n,e,t)},remove:function(){!function(e){if(null===e.parentNode)return!1;e.parentNode.removeChild(e)}(n)}}}}}]);
@@ -0,0 +1 @@
+ var _JUPYTERLAB;(()=>{"use strict";var e,r,t,n,o,a,i,u,l,f,s,d,p,c,h,v,b,g,m,y={246(e,r,t){var n={"./index":()=>t.e(171).then(()=>()=>t(171)),"./extension":()=>t.e(171).then(()=>()=>t(171)),"./style":()=>t.e(728).then(()=>()=>t(728))},o=(e,r)=>(t.R=r,r=t.o(n,e)?n[e]():Promise.resolve().then(()=>{throw new Error('Module "'+e+'" does not exist in container.')}),t.R=void 0,r),a=(e,r)=>{if(t.S){var n="default",o=t.S[n];if(o&&o!==e)throw new Error("Container initialization failed as it has already been initialized with a different share scope");return t.S[n]=e,t.I(n,r)}};t.d(r,{get:()=>o,init:()=>a})}},w={};function j(e){var r=w[e];if(void 0!==r)return r.exports;var t=w[e]={id:e,exports:{}};return y[e](t,t.exports,j),t.exports}j.m=y,j.c=w,j.n=e=>{var r=e&&e.__esModule?()=>e.default:()=>e;return j.d(r,{a:r}),r},j.d=(e,r)=>{for(var t in r)j.o(r,t)&&!j.o(e,t)&&Object.defineProperty(e,t,{enumerable:!0,get:r[t]})},j.f={},j.e=e=>Promise.all(Object.keys(j.f).reduce((r,t)=>(j.f[t](e,r),r),[])),j.u=e=>e+"."+{171:"d366980651e0db8d978c",728:"6552504d5b9b27551bc5"}[e]+".js?v="+{171:"d366980651e0db8d978c",728:"6552504d5b9b27551bc5"}[e],j.g=function(){if("object"==typeof globalThis)return globalThis;try{return this||new Function("return this")()}catch(e){if("object"==typeof window)return window}}(),j.o=(e,r)=>Object.prototype.hasOwnProperty.call(e,r),e={},r="jl_db_comp:",j.l=(t,n,o,a)=>{if(e[t])e[t].push(n);else{var i,u;if(void 0!==o)for(var l=document.getElementsByTagName("script"),f=0;f<l.length;f++){var s=l[f];if(s.getAttribute("src")==t||s.getAttribute("data-webpack")==r+o){i=s;break}}i||(u=!0,(i=document.createElement("script")).charset="utf-8",j.nc&&i.setAttribute("nonce",j.nc),i.setAttribute("data-webpack",r+o),i.src=t),e[t]=[n];var d=(r,n)=>{i.onerror=i.onload=null,clearTimeout(p);var o=e[t];if(delete e[t],i.parentNode&&i.parentNode.removeChild(i),o&&o.forEach(e=>e(n)),r)return r(n)},p=setTimeout(d.bind(null,void 0,{type:"timeout",target:i}),12e4);i.onerror=d.bind(null,i.onerror),i.onload=d.bind(null,i.onload),u&&document.head.appendChild(i)}},j.r=e=>{"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},(()=>{j.S={};var e={},r={};j.I=(t,n)=>{n||(n=[]);var o=r[t];if(o||(o=r[t]={}),!(n.indexOf(o)>=0)){if(n.push(o),e[t])return e[t];j.o(j.S,t)||(j.S[t]={});var a=j.S[t],i="jl_db_comp",u=[];return"default"===t&&((e,r,t,n)=>{var o=a[e]=a[e]||{},u=o[r];(!u||!u.loaded&&(1!=!u.eager?n:i>u.from))&&(o[r]={get:()=>j.e(171).then(()=>()=>j(171)),from:i,eager:!1})})("jl_db_comp","0.1.2"),e[t]=u.length?Promise.all(u).then(()=>e[t]=1):1}}})(),(()=>{var e;j.g.importScripts&&(e=j.g.location+"");var r=j.g.document;if(!e&&r&&(r.currentScript&&"SCRIPT"===r.currentScript.tagName.toUpperCase()&&(e=r.currentScript.src),!e)){var t=r.getElementsByTagName("script");if(t.length)for(var n=t.length-1;n>-1&&(!e||!/^http(s?):/.test(e));)e=t[n--].src}if(!e)throw new Error("Automatic publicPath is not supported in this browser");e=e.replace(/^blob:/,"").replace(/#.*$/,"").replace(/\?.*$/,"").replace(/\/[^\/]+$/,"/"),j.p=e})(),t=e=>{var r=e=>e.split(".").map(e=>+e==e?+e:e),t=/^([^-+]+)?(?:-([^+]+))?(?:\+(.+))?$/.exec(e),n=t[1]?r(t[1]):[];return t[2]&&(n.length++,n.push.apply(n,r(t[2]))),t[3]&&(n.push([]),n.push.apply(n,r(t[3]))),n},n=(e,r)=>{e=t(e),r=t(r);for(var n=0;;){if(n>=e.length)return n<r.length&&"u"!=(typeof r[n])[0];var o=e[n],a=(typeof o)[0];if(n>=r.length)return"u"==a;var i=r[n],u=(typeof 
i)[0];if(a!=u)return"o"==a&&"n"==u||"s"==u||"u"==a;if("o"!=a&&"u"!=a&&o!=i)return o<i;n++}},o=e=>{var r=e[0],t="";if(1===e.length)return"*";if(r+.5){t+=0==r?">=":-1==r?"<":1==r?"^":2==r?"~":r>0?"=":"!=";for(var n=1,a=1;a<e.length;a++)n--,t+="u"==(typeof(u=e[a]))[0]?"-":(n>0?".":"")+(n=2,u);return t}var i=[];for(a=1;a<e.length;a++){var u=e[a];i.push(0===u?"not("+l()+")":1===u?"("+l()+" || "+l()+")":2===u?i.pop()+" "+i.pop():o(u))}return l();function l(){return i.pop().replace(/^\((.+)\)$/,"$1")}},a=(e,r)=>{if(0 in e){r=t(r);var n=e[0],o=n<0;o&&(n=-n-1);for(var i=0,u=1,l=!0;;u++,i++){var f,s,d=u<e.length?(typeof e[u])[0]:"";if(i>=r.length||"o"==(s=(typeof(f=r[i]))[0]))return!l||("u"==d?u>n&&!o:""==d!=o);if("u"==s){if(!l||"u"!=d)return!1}else if(l)if(d==s)if(u<=n){if(f!=e[u])return!1}else{if(o?f>e[u]:f<e[u])return!1;f!=e[u]&&(l=!1)}else if("s"!=d&&"n"!=d){if(o||u<=n)return!1;l=!1,u--}else{if(u<=n||s<d!=o)return!1;l=!1}else"s"!=d&&"n"!=d&&(l=!1,u--)}}var p=[],c=p.pop.bind(p);for(i=1;i<e.length;i++){var h=e[i];p.push(1==h?c()|c():2==h?c()&c():h?a(h,r):!c())}return!!c()},i=(e,r)=>e&&j.o(e,r),u=e=>(e.loaded=1,e.get()),l=e=>Object.keys(e).reduce((r,t)=>(e[t].eager&&(r[t]=e[t]),r),{}),f=(e,r,t)=>{var o=t?l(e[r]):e[r];return Object.keys(o).reduce((e,r)=>!e||!o[e].loaded&&n(e,r)?r:e,0)},s=(e,r,t,n)=>"Unsatisfied version "+t+" from "+(t&&e[r][t].from)+" of shared singleton module "+r+" (required "+o(n)+")",d=e=>{throw new Error(e)},p=e=>{"undefined"!=typeof console&&console.warn&&console.warn(e)},c=(e,r,t)=>t?t():((e,r)=>d("Shared module "+r+" doesn't exist in shared scope "+e))(e,r),h=(e=>function(r,t,n,o,a){var i=j.I(r);return i&&i.then&&!n?i.then(e.bind(e,r,j.S[r],t,!1,o,a)):e(r,j.S[r],t,n,o,a)})((e,r,t,n,o,l)=>{if(!i(r,t))return c(e,t,l);var d=f(r,t,n);return a(o,d)||p(s(r,t,d,o)),u(r[t][d])}),v={},b={206:()=>h("default","@jupyterlab/completer",!1,[1,4,5,2]),428:()=>h("default","@jupyterlab/notebook",!1,[1,4,5,2]),469:()=>h("default","@jupyterlab/coreutils",!1,[1,6,5,2]),490:()=>h("default","@jupyterlab/settingregistry",!1,[1,4,5,2]),830:()=>h("default","@jupyterlab/services",!1,[1,7,5,2])},g={171:[206,428,469,490,830]},m={},j.f.consumes=(e,r)=>{j.o(g,e)&&g[e].forEach(e=>{if(j.o(v,e))return r.push(v[e]);if(!m[e]){var t=r=>{v[e]=0,j.m[e]=t=>{delete j.c[e],t.exports=r()}};m[e]=!0;var n=r=>{delete v[e],j.m[e]=t=>{throw delete j.c[e],r}};try{var o=b[e]();o.then?r.push(v[e]=o.then(t).catch(n)):t(o)}catch(e){n(e)}}})},(()=>{var e={248:0};j.f.j=(r,t)=>{var n=j.o(e,r)?e[r]:void 0;if(0!==n)if(n)t.push(n[2]);else{var o=new Promise((t,o)=>n=e[r]=[t,o]);t.push(n[2]=o);var a=j.p+j.u(r),i=new Error;j.l(a,t=>{if(j.o(e,r)&&(0!==(n=e[r])&&(e[r]=void 0),n)){var o=t&&("load"===t.type?"missing":t.type),a=t&&t.target&&t.target.src;i.message="Loading chunk "+r+" failed.\n("+o+": "+a+")",i.name="ChunkLoadError",i.type=o,i.request=a,n[1](i)}},"chunk-"+r,r)}};var r=(r,t)=>{var n,o,[a,i,u]=t,l=0;if(a.some(r=>0!==e[r])){for(n in i)j.o(i,n)&&(j.m[n]=i[n]);u&&u(j)}for(r&&r(t);l<a.length;l++)o=a[l],j.o(e,o)&&e[o]&&e[o][0](),e[o]=0},t=self.webpackChunkjl_db_comp=self.webpackChunkjl_db_comp||[];t.forEach(r.bind(null,0)),t.push=r.bind(null,t.push.bind(t))})(),j.nc=void 0;var S=j(246);(_JUPYTERLAB=void 0===_JUPYTERLAB?{}:_JUPYTERLAB).jl_db_comp=S})();
@@ -0,0 +1,16 @@
+ {
+   "packages": [
+     {
+       "name": "css-loader",
+       "versionInfo": "6.11.0",
+       "licenseId": "MIT",
+       "extractedText": "Copyright JS Foundation and other contributors\n\nPermission is hereby granted, free of charge, to any person obtaining\na copy of this software and associated documentation files (the\n'Software'), to deal in the Software without restriction, including\nwithout limitation the rights to use, copy, modify, merge, publish,\ndistribute, sublicense, and/or sell copies of the Software, and to\npermit persons to whom the Software is furnished to do so, subject to\nthe following conditions:\n\nThe above copyright notice and this permission notice shall be\nincluded in all copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,\nEXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\nMERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.\nIN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY\nCLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,\nTORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE\nSOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n"
+     },
+     {
+       "name": "style-loader",
+       "versionInfo": "3.3.4",
+       "licenseId": "MIT",
+       "extractedText": "Copyright JS Foundation and other contributors\n\nPermission is hereby granted, free of charge, to any person obtaining\na copy of this software and associated documentation files (the\n'Software'), to deal in the Software without restriction, including\nwithout limitation the rights to use, copy, modify, merge, publish,\ndistribute, sublicense, and/or sell copies of the Software, and to\npermit persons to whom the Software is furnished to do so, subject to\nthe following conditions:\n\nThe above copyright notice and this permission notice shall be\nincluded in all copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,\nEXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\nMERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.\nIN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY\nCLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,\nTORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE\nSOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n"
+     }
+   ]
+ }