signalpilot-ai-internal 0.4.5__py3-none-any.whl → 0.4.6__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of signalpilot-ai-internal might be problematic. Click here for more details.
- signalpilot_ai_internal/_version.py +1 -1
- signalpilot_ai_internal/handlers.py +242 -0
- {signalpilot_ai_internal-0.4.5.data → signalpilot_ai_internal-0.4.6.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/package.json +2 -2
- {signalpilot_ai_internal-0.4.5.data → signalpilot_ai_internal-0.4.6.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/schemas/signalpilot-ai-internal/package.json.orig +1 -1
- signalpilot_ai_internal-0.4.6.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/839.0be091400e2f58fe8fd2.js +1 -0
- signalpilot_ai_internal-0.4.6.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/923.e80ae4c5cedc1d73f2a1.js +1 -0
- signalpilot_ai_internal-0.4.5.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/remoteEntry.2304af1dc768da3716f9.js → signalpilot_ai_internal-0.4.6.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/remoteEntry.61ef7b3a050c161d7a88.js +1 -1
- {signalpilot_ai_internal-0.4.5.dist-info → signalpilot_ai_internal-0.4.6.dist-info}/METADATA +1 -1
- {signalpilot_ai_internal-0.4.5.dist-info → signalpilot_ai_internal-0.4.6.dist-info}/RECORD +38 -38
- signalpilot_ai_internal-0.4.5.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/447.1e78c4216aeaaeadff40.js +0 -1
- signalpilot_ai_internal-0.4.5.data/data/share/jupyter/labextensions/signalpilot-ai-internal/static/839.be52ed152c5de2006fde.js +0 -1
- {signalpilot_ai_internal-0.4.5.data → signalpilot_ai_internal-0.4.6.data}/data/etc/jupyter/jupyter_server_config.d/signalpilot_ai.json +0 -0
- {signalpilot_ai_internal-0.4.5.data → signalpilot_ai_internal-0.4.6.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/install.json +0 -0
- {signalpilot_ai_internal-0.4.5.data → signalpilot_ai_internal-0.4.6.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/schemas/signalpilot-ai-internal/plugin.json +0 -0
- {signalpilot_ai_internal-0.4.5.data → signalpilot_ai_internal-0.4.6.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/104.04e170724f369fcbaf19.js +0 -0
- {signalpilot_ai_internal-0.4.5.data → signalpilot_ai_internal-0.4.6.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/104.04e170724f369fcbaf19.js.LICENSE.txt +0 -0
- {signalpilot_ai_internal-0.4.5.data → signalpilot_ai_internal-0.4.6.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/122.e2dadf63dc64d7b5f1ee.js +0 -0
- {signalpilot_ai_internal-0.4.5.data → signalpilot_ai_internal-0.4.6.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/220.328403b5545f268b95c6.js +0 -0
- {signalpilot_ai_internal-0.4.5.data → signalpilot_ai_internal-0.4.6.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/262.726e1da31a50868cb297.js +0 -0
- {signalpilot_ai_internal-0.4.5.data → signalpilot_ai_internal-0.4.6.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/280.35d8c8b68815702a5238.js +0 -0
- {signalpilot_ai_internal-0.4.5.data → signalpilot_ai_internal-0.4.6.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/280.35d8c8b68815702a5238.js.LICENSE.txt +0 -0
- {signalpilot_ai_internal-0.4.5.data → signalpilot_ai_internal-0.4.6.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/353.72484b768a04f89bd3dd.js +0 -0
- {signalpilot_ai_internal-0.4.5.data → signalpilot_ai_internal-0.4.6.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/364.dbec4c2dc12e7b050dcc.js +0 -0
- {signalpilot_ai_internal-0.4.5.data → signalpilot_ai_internal-0.4.6.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/384.fa432bdb7fb6b1c95ad6.js +0 -0
- {signalpilot_ai_internal-0.4.5.data → signalpilot_ai_internal-0.4.6.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/439.37e271d7a80336daabe2.js +0 -0
- {signalpilot_ai_internal-0.4.5.data → signalpilot_ai_internal-0.4.6.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/476.9b4f05a99f5003f82094.js +0 -0
- {signalpilot_ai_internal-0.4.5.data → signalpilot_ai_internal-0.4.6.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/481.73c7a9290b7d35a8b9c1.js +0 -0
- {signalpilot_ai_internal-0.4.5.data → signalpilot_ai_internal-0.4.6.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/512.b58fc0093d080b8ee61c.js +0 -0
- {signalpilot_ai_internal-0.4.5.data → signalpilot_ai_internal-0.4.6.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/553.b4042a795c91d9ff71ef.js +0 -0
- {signalpilot_ai_internal-0.4.5.data → signalpilot_ai_internal-0.4.6.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/553.b4042a795c91d9ff71ef.js.LICENSE.txt +0 -0
- {signalpilot_ai_internal-0.4.5.data → signalpilot_ai_internal-0.4.6.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/606.90aaaae46b73dc3c08fb.js +0 -0
- {signalpilot_ai_internal-0.4.5.data → signalpilot_ai_internal-0.4.6.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/635.9720593ee20b768da3ca.js +0 -0
- {signalpilot_ai_internal-0.4.5.data → signalpilot_ai_internal-0.4.6.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/713.8e6edc9a965bdd578ca7.js +0 -0
- {signalpilot_ai_internal-0.4.5.data → signalpilot_ai_internal-0.4.6.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/742.91e7b516c8699eea3373.js +0 -0
- {signalpilot_ai_internal-0.4.5.data → signalpilot_ai_internal-0.4.6.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/785.3aa564fc148b37d1d719.js +0 -0
- {signalpilot_ai_internal-0.4.5.data → signalpilot_ai_internal-0.4.6.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/888.34054db17bcf6e87ec95.js +0 -0
- {signalpilot_ai_internal-0.4.5.data → signalpilot_ai_internal-0.4.6.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/style.js +0 -0
- {signalpilot_ai_internal-0.4.5.data → signalpilot_ai_internal-0.4.6.data}/data/share/jupyter/labextensions/signalpilot-ai-internal/static/third-party-licenses.json +0 -0
- {signalpilot_ai_internal-0.4.5.dist-info → signalpilot_ai_internal-0.4.6.dist-info}/WHEEL +0 -0
- {signalpilot_ai_internal-0.4.5.dist-info → signalpilot_ai_internal-0.4.6.dist-info}/licenses/LICENSE +0 -0
|
@@ -1,4 +1,8 @@
|
|
|
1
1
|
import json
|
|
2
|
+
import os
|
|
3
|
+
import re
|
|
4
|
+
from pathlib import Path
|
|
5
|
+
from datetime import datetime
|
|
2
6
|
|
|
3
7
|
from jupyter_server.base.handlers import APIHandler
|
|
4
8
|
from jupyter_server.utils import url_path_join
|
|
@@ -22,6 +26,237 @@ class HelloWorldHandler(APIHandler):
|
|
|
22
26
|
}))
|
|
23
27
|
|
|
24
28
|
|
|
29
|
+
class ReadAllFilesHandler(APIHandler):
    """Handler that summarizes the workspace for an LLM prompt.

    GET returns a JSON payload with a human/LLM-readable ``welcome_context``
    string describing the 10 most recently edited notebooks and the data
    files they reference, plus counts of notebooks and data files found.
    """

    # Common data file extensions
    DATA_EXTENSIONS = {'.csv', '.json', '.xlsx', '.xls', '.parquet', '.pkl', '.pickle',
                       '.feather', '.hdf5', '.h5', '.sql', '.db', '.sqlite', '.tsv', '.txt'}

    # Directories to exclude from search
    EXCLUDE_DIRS = {'.git', '.ipynb_checkpoints', 'node_modules', '__pycache__',
                    '.venv', 'venv', 'env', '.pytest_cache', '.mypy_cache',
                    'dist', 'build', '.tox', 'logs', '.vscode'}

    @tornado.web.authenticated
    def get(self):
        """Build and return the workspace overview JSON.

        On any failure a 500 is returned with the error message, so the
        frontend can display it instead of a generic error page.
        """
        try:
            # Root is the directory Jupyter Lab was launched from.
            # NOTE(review): assumes the server's cwd is the workspace root —
            # confirm against the contents manager's root_dir setting.
            root_dir = Path(os.getcwd())

            notebooks = self._find_notebooks(root_dir)
            data_files = self._find_data_files(root_dir)

            # Only the 10 most recently edited notebooks are analyzed.
            recent_notebooks = self._get_recent_notebooks(notebooks, limit=10)

            # Analyze each notebook and collect the union of its data deps.
            notebook_info = []
            all_data_dependencies = set()
            for notebook_path in recent_notebooks:
                info = self._analyze_notebook(notebook_path, data_files, root_dir)
                notebook_info.append(info)
                all_data_dependencies.update(info['data_dependencies'])

            # Keep only data files referenced (by name or path, in either
            # direction of substring containment) by a recent notebook.
            referenced_data_files = []
            for data_file in data_files:
                rel_path = str(data_file.relative_to(root_dir))
                rel_path_forward = rel_path.replace('\\', '/')
                file_name = data_file.name
                if any(dep in (file_name, rel_path, rel_path_forward) or
                       file_name in dep or rel_path in dep or rel_path_forward in dep
                       for dep in all_data_dependencies):
                    referenced_data_files.append(data_file)

            # Generate the LLM-optimized context string with only referenced data.
            welcome_context = self._generate_welcome_context(notebook_info, referenced_data_files, root_dir)

            self.finish(json.dumps({
                "welcome_context": welcome_context,
                "notebook_count": len(notebooks),
                "data_file_count": len(data_files),
                "recent_notebook_count": len(recent_notebooks),
                "referenced_data_count": len(referenced_data_files)
            }))

        except Exception as e:
            self.set_status(500)
            self.finish(json.dumps({
                "error": str(e)
            }))

    def _find_notebooks(self, root_dir: Path) -> list:
        """Find all .ipynb files in the workspace, skipping EXCLUDE_DIRS."""
        return [
            path for path in root_dir.rglob('*.ipynb')
            if not any(excluded in path.parts for excluded in self.EXCLUDE_DIRS)
        ]

    def _find_data_files(self, root_dir: Path) -> list:
        """Find all files whose suffix is in DATA_EXTENSIONS, skipping EXCLUDE_DIRS."""
        data_files = []
        for path in root_dir.rglob('*'):
            if any(excluded in path.parts for excluded in self.EXCLUDE_DIRS):
                continue
            if path.is_file() and path.suffix.lower() in self.DATA_EXTENSIONS:
                data_files.append(path)
        return data_files

    def _get_recent_notebooks(self, notebooks: list, limit: int = 10) -> list:
        """Return up to ``limit`` notebook paths, most recently modified first.

        Files that disappear (or become unreadable) between the rglob scan
        and the stat() call are skipped instead of failing the whole request.
        """
        notebooks_with_mtime = []
        for nb in notebooks:
            try:
                notebooks_with_mtime.append((nb, nb.stat().st_mtime))
            except OSError:
                # Deleted/renamed since discovery — ignore it.
                continue
        notebooks_with_mtime.sort(key=lambda item: item[1], reverse=True)
        return [nb for nb, _ in notebooks_with_mtime[:limit]]

    def _analyze_notebook(self, notebook_path: Path, data_files: list, root_dir: Path) -> dict:
        """Read one notebook and report its name, path, mtime and data deps.

        Returns a dict with keys ``name``, ``path``, ``last_modified`` and
        ``data_dependencies``; an ``error`` key is added when the notebook
        could not be read or analyzed.
        """
        try:
            with open(notebook_path, 'r', encoding='utf-8') as f:
                notebook_content = f.read()

            referenced_data_files = self._find_data_references(notebook_content, data_files, root_dir)
            relative_path = notebook_path.relative_to(root_dir)
            mtime = datetime.fromtimestamp(notebook_path.stat().st_mtime)

            return {
                'name': notebook_path.name,
                'path': str(relative_path),
                'last_modified': mtime.strftime('%Y-%m-%d %H:%M:%S'),
                'data_dependencies': referenced_data_files
            }
        except Exception as e:
            # If we can't read the notebook, return basic info.
            relative_path = notebook_path.relative_to(root_dir)
            return {
                'name': notebook_path.name,
                'path': str(relative_path),
                'last_modified': 'unknown',
                'data_dependencies': [],
                'error': str(e)
            }

    def _find_data_references(self, content: str, data_files: list, root_dir: Path) -> list:
        """Find references to known data files (and DB URLs) in notebook text.

        Matches quoted paths with data extensions and pandas-style
        read_*/to_* calls, keeping only references that correspond to a
        discovered data file. Returns a sorted list of reference strings.
        """
        # Set of acceptable spellings for each data file: bare name,
        # relative path, and relative path with forward slashes.
        data_file_patterns = set()
        for data_file in data_files:
            data_file_patterns.add(data_file.name)
            try:
                rel_path = str(data_file.relative_to(root_dir))
                data_file_patterns.add(rel_path)
                data_file_patterns.add(rel_path.replace('\\', '/'))
            except ValueError:
                # File lives outside root_dir; name-only matching still applies.
                pass

        # Common patterns: pd.read_csv('file.csv'), open('file.csv'), 'path/to/file.csv'
        patterns = [
            r'["\']([^"\']+\.(?:csv|json|xlsx?|parquet|pkl|pickle|feather|hdf5|h5|sql|db|sqlite|tsv|txt))["\']',
            r'read_(?:csv|json|excel|parquet|pickle|feather|hdf|sql|table)\(["\']([^"\']+)["\']',
            r'to_(?:csv|json|excel|parquet|pickle|feather|hdf|sql)\(["\']([^"\']+)["\']',
        ]

        found_references = set()
        for pattern in patterns:
            for match in re.finditer(pattern, content, re.IGNORECASE):
                file_ref = match.group(1)
                # Keep the reference only if it matches a discovered file.
                if file_ref in data_file_patterns or any(file_ref in str(df) for df in data_files):
                    found_references.add(file_ref)

        # Also check for database connection strings.
        db_patterns = [
            r'(?:postgresql|mysql|sqlite|mongodb)://[^\s\'"]+',
            r'(?:DATABASE_URL|DB_URL|CONNECTION_STRING)\s*=\s*["\']([^"\']+)["\']'
        ]
        for pattern in db_patterns:
            for match in re.finditer(pattern, content, re.IGNORECASE):
                # Truncate to avoid leaking full credentials into the context.
                found_references.add(f"Database: {match.group(0)[:50]}...")

        return sorted(found_references)

    def _generate_welcome_context(self, notebook_info: list, data_files: list, root_dir: Path) -> str:
        """Generate an LLM-optimized, human-readable context string."""
        lines = []
        lines.append("# Workspace Overview\n")

        if not notebook_info:
            lines.append("No notebooks found in the workspace.\n")
        else:
            lines.append(f"## Recent Notebooks ({len(notebook_info)})\n")
            for i, info in enumerate(notebook_info, 1):
                lines.append(f"\n### {i}. {info['name']}")
                lines.append(f"   - Path: {info['path']}")
                lines.append(f"   - Last Modified: {info['last_modified']}")

                if info.get('error'):
                    lines.append(f"   - Note: Could not fully analyze ({info['error']})")

                if info['data_dependencies']:
                    lines.append("   - Data Dependencies:")
                    for dep in info['data_dependencies']:
                        lines.append(f"     • {dep}")
                else:
                    lines.append("   - Data Dependencies: None detected")

        # Summary of data files referenced by recent notebooks, grouped by extension.
        if data_files:
            lines.append(f"\n## Data Files Referenced by Recent Notebooks ({len(data_files)} total)\n")

            by_extension = {}
            for df in data_files:
                ext = df.suffix.lower()
                if ext not in by_extension:
                    by_extension[ext] = []
                try:
                    by_extension[ext].append(str(df.relative_to(root_dir)))
                except ValueError:
                    by_extension[ext].append(str(df))

            for ext in sorted(by_extension.keys()):
                files = by_extension[ext]
                lines.append(f"\n### {ext} files ({len(files)})")
                # Show all referenced files (they should be limited already).
                for f in sorted(files):
                    lines.append(f"  - {f}")
        else:
            lines.append("\n## Data Files Referenced by Recent Notebooks\n")
            lines.append("No data file dependencies found in recent notebooks.\n")

        return '\n'.join(lines)
|
|
258
|
+
|
|
259
|
+
|
|
25
260
|
def setup_handlers(web_app):
|
|
26
261
|
host_pattern = ".*$"
|
|
27
262
|
base_url = web_app.settings["base_url"]
|
|
@@ -29,6 +264,9 @@ def setup_handlers(web_app):
|
|
|
29
264
|
# Original hello world endpoint
|
|
30
265
|
hello_route = url_path_join(base_url, "signalpilot-ai-internal", "hello-world")
|
|
31
266
|
|
|
267
|
+
# Read all files endpoint
|
|
268
|
+
read_all_files_route = url_path_join(base_url, "signalpilot-ai-internal", "read-all-files")
|
|
269
|
+
|
|
32
270
|
# Cache service endpoints
|
|
33
271
|
chat_histories_route = url_path_join(base_url, "signalpilot-ai-internal", "cache", "chat-histories")
|
|
34
272
|
chat_history_route = url_path_join(base_url, "signalpilot-ai-internal", "cache", "chat-histories", "([^/]+)")
|
|
@@ -54,6 +292,9 @@ def setup_handlers(web_app):
|
|
|
54
292
|
# Original endpoint
|
|
55
293
|
(hello_route, HelloWorldHandler),
|
|
56
294
|
|
|
295
|
+
# Read all files endpoint
|
|
296
|
+
(read_all_files_route, ReadAllFilesHandler),
|
|
297
|
+
|
|
57
298
|
# Chat histories endpoints
|
|
58
299
|
(chat_histories_route, ChatHistoriesHandler),
|
|
59
300
|
(chat_history_route, ChatHistoriesHandler),
|
|
@@ -90,6 +331,7 @@ def setup_handlers(web_app):
|
|
|
90
331
|
|
|
91
332
|
print("SignalPilot AI backend handlers registered:")
|
|
92
333
|
print(f" - Hello World: {hello_route}")
|
|
334
|
+
print(f" - Read All Files: {read_all_files_route}")
|
|
93
335
|
print(f" - Chat Histories: {chat_histories_route}")
|
|
94
336
|
print(f" - Chat History (by ID): {chat_history_route}")
|
|
95
337
|
print(f" - App Values: {app_values_route}")
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "signalpilot-ai-internal",
|
|
3
|
-
"version": "0.4.5",
|
|
3
|
+
"version": "0.4.6",
|
|
4
4
|
"description": "SignalPilot Agent - Your Jupyter Notebook Assistant",
|
|
5
5
|
"keywords": [
|
|
6
6
|
"jupyter",
|
|
@@ -133,7 +133,7 @@
|
|
|
133
133
|
"outputDir": "signalpilot_ai_internal/labextension",
|
|
134
134
|
"schemaDir": "schema",
|
|
135
135
|
"_build": {
|
|
136
|
-
"load": "static/remoteEntry.2304af1dc768da3716f9.js",
|
|
136
|
+
"load": "static/remoteEntry.61ef7b3a050c161d7a88.js",
|
|
137
137
|
"extension": "./extension",
|
|
138
138
|
"style": "./style"
|
|
139
139
|
}
|