llms-py 3.0.25__py3-none-any.whl → 3.0.27__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- llms/extensions/computer/filesystem.py +16 -1
- llms/extensions/core_tools/__init__.py +1 -169
- llms/extensions/github_auth/README.md +3 -3
- llms/extensions/github_auth/__init__.py +18 -1
- llms/extensions/providers/anthropic.py +1 -1
- llms/extensions/providers/google.py +1 -1
- llms/extensions/providers/nvidia.py +1 -1
- llms/extensions/skills/__init__.py +137 -138
- llms/extensions/skills/ui/index.mjs +20 -13
- llms/llms.json +1 -0
- llms/main.py +54 -5
- llms/ui/ai.mjs +1 -1
- {llms_py-3.0.25.dist-info → llms_py-3.0.27.dist-info}/METADATA +1 -1
- {llms_py-3.0.25.dist-info → llms_py-3.0.27.dist-info}/RECORD +18 -18
- {llms_py-3.0.25.dist-info → llms_py-3.0.27.dist-info}/WHEEL +0 -0
- {llms_py-3.0.25.dist-info → llms_py-3.0.27.dist-info}/entry_points.txt +0 -0
- {llms_py-3.0.25.dist-info → llms_py-3.0.27.dist-info}/licenses/LICENSE +0 -0
- {llms_py-3.0.25.dist-info → llms_py-3.0.27.dist-info}/top_level.txt +0 -0
llms/extensions/computer/filesystem.py CHANGED

@@ -496,15 +496,20 @@ def move_file(source: Annotated[str, "Source path"], destination: Annotated[str,
 
 
 def search_files(
-    path: Annotated[str, "Path to search in"],
     pattern: Annotated[str, "Glob pattern to match"],
+    path: Annotated[str, "Path to search in"] = None,
     exclude_patterns: Annotated[List[str], "Glob patterns to exclude"] = None,
+    sort_by: Annotated[Literal["path", "modified", "size"], "Sort by path, modified or size"] = "path",
+    max_results: int = 200,
 ) -> str:
     """
     Recursively search for files and directories matching a pattern. The patterns should be glob-style patterns that match paths relative to the working directory.
     Use pattern like '*.ext' to match files in current directory, and '**/*.ext' to match files in all subdirectories.
     Returns full paths to all matching items. Great for finding files when you don't know their exact location. Only searches within allowed directories.
+    If no path is provided, searches in the first allowed directory.
     """
+    if not path:
+        path = get_allowed_directories()[0]
     valid_path = _validate_path(path)
     results = []
     if exclude_patterns is None:
@@ -533,6 +538,16 @@ def search_files(
     except Exception as e:
         raise RuntimeError(f"Error searching files in {valid_path}: {e}") from e
 
+    if sort_by == "size":
+        results.sort(key=lambda p: os.path.getsize(p) if os.path.exists(p) else 0, reverse=True)
+    elif sort_by == "modified":
+        results.sort(key=lambda p: os.path.getmtime(p) if os.path.exists(p) else 0, reverse=True)
+    else:  # path
+        results.sort()
+
+    if max_results > 0:
+        results = results[:max_results]
+
     if not results:
         return "No matches found"
 
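For reference, a minimal standalone sketch of what the new `sort_by` and `max_results` options do to the result list (the input paths below are just an illustration; the real tool also validates paths against the allowed directories):

```python
import os

def order_and_trim(paths, sort_by="path", max_results=200):
    """Mirror of the ordering/truncation added to search_files in 3.0.27."""
    if sort_by == "size":
        paths.sort(key=lambda p: os.path.getsize(p) if os.path.exists(p) else 0, reverse=True)
    elif sort_by == "modified":
        paths.sort(key=lambda p: os.path.getmtime(p) if os.path.exists(p) else 0, reverse=True)
    else:  # "path" (default): plain alphabetical order
        paths.sort()
    return paths[:max_results] if max_results > 0 else paths

# Hypothetical usage: largest entries in the current directory first, capped at 5 results
print(order_and_trim(os.listdir("."), sort_by="size", max_results=5))
```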
llms/extensions/core_tools/__init__.py CHANGED

@@ -4,8 +4,6 @@ Core System Tools providing essential file operations, memory persistence, math
 
 import ast
 import contextlib
-import glob
-import json
 import math
 import operator
 import os
@@ -21,165 +19,6 @@ from aiohttp import web
 
 g_ctx = None
 
-# -----------------------------
-# In-memory storage (replace later)
-# -----------------------------
-
-_MEMORY_STORE: Dict[str, Any] = {}
-_SEMANTIC_STORE: List[Dict[str, Any]] = []  # {id, text, metadata}
-
-
-# -----------------------------
-# Memory tools
-# -----------------------------
-
-
-def memory_read(key: str) -> Any:
-    """Read a value from persistent memory."""
-    return _MEMORY_STORE.get(key)
-
-
-def memory_write(key: str, value: Any) -> bool:
-    """Write a value to persistent memory."""
-    _MEMORY_STORE[key] = value
-    return True
-
-
-# -----------------------------
-# Path safety helpers
-# -----------------------------
-
-# Limit tools to only access files and folders within LLMS_BASE_DIR if specified, otherwise the current working directory
-_BASE_DIR = os.environ.get("LLMS_BASE_DIR") or os.path.realpath(os.getcwd())
-
-
-def _resolve_safe_path(path: str) -> str:
-    """
-    Resolve a path and ensure it stays within the current working directory.
-    Raises ValueError if the path escapes the base directory.
-    """
-    resolved = os.path.realpath(os.path.join(_BASE_DIR, path))
-    if not resolved.startswith(_BASE_DIR + os.sep) and resolved != _BASE_DIR:
-        raise ValueError("Access denied: path is outside the working directory")
-    return resolved
-
-
-# -----------------------------
-# Semantic search (placeholder)
-# -----------------------------
-
-
-def semantic_search(query: str, top_k: int = 5) -> List[Dict[str, Any]]:
-    """
-    Naive semantic search placeholder.
-    Replace with embeddings + vector DB.
-    """
-    results = []
-    for item in _SEMANTIC_STORE:
-        if query.lower() in item["text"].lower():
-            results.append(item)
-    return results[:top_k]
-
-
-# -----------------------------
-# File system tools (restricted to CWD)
-# -----------------------------
-
-
-def read_file(path: str) -> str:
-    """Read a text file from disk within the current working directory."""
-    safe_path = _resolve_safe_path(path)
-    with open(safe_path, encoding="utf-8") as f:
-        return f.read()
-
-
-def write_file(path: str, content: str) -> bool:
-    """Write text to a file within the current working directory (overwrites)."""
-    safe_path = _resolve_safe_path(path)
-    os.makedirs(os.path.dirname(safe_path) or _BASE_DIR, exist_ok=True)
-    with open(safe_path, "w", encoding="utf-8") as f:
-        f.write(content)
-    return True
-
-
-def list_directory(path: str) -> str:
-    """List directory contents"""
-    safe_path = _resolve_safe_path(path)
-    if not os.path.exists(safe_path):
-        return f"Error: Path not found: {path}"
-
-    entries = []
-    try:
-        for entry in os.scandir(safe_path):
-            stat = entry.stat()
-            entries.append(
-                {
-                    "name": "/" + entry.name if entry.is_dir() else entry.name,
-                    "size": stat.st_size,
-                    "mtime": datetime.fromtimestamp(stat.st_mtime).isoformat(),
-                }
-            )
-        return json.dumps({"path": os.path.relpath(safe_path, _BASE_DIR), "entries": entries}, indent=2)
-    except Exception as e:
-        return f"Error listing directory: {e}"
-
-
-def glob_paths(
-    pattern: str,
-    extensions: Optional[List[str]] = None,
-    sort_by: str = "path",  # "path" | "modified" | "size"
-    max_results: int = 100,
-) -> Dict[str, List[Dict[str, str]]]:
-    """
-    Find files and directories matching a glob pattern
-    """
-    if sort_by not in {"path", "modified", "size"}:
-        raise ValueError("sort_by must be one of: path, modified, size")
-
-    safe_pattern = _resolve_safe_path(pattern)
-
-    results = []
-
-    for path in glob.glob(safe_pattern, recursive=True):
-        resolved = os.path.realpath(path)
-
-        # Enforce CWD restriction (important for symlinks)
-        if not resolved.startswith(_BASE_DIR):
-            continue
-
-        is_dir = os.path.isdir(resolved)
-
-        # Extension filtering (files only)
-        if extensions and not is_dir:
-            ext = os.path.splitext(resolved)[1].lower().lstrip(".")
-            if ext not in {e.lower().lstrip(".") for e in extensions}:
-                continue
-
-        stat = os.stat(resolved)
-
-        results.append(
-            {
-                "path": os.path.relpath(resolved, _BASE_DIR),
-                "type": "directory" if is_dir else "file",
-                "size_bytes": stat.st_size,
-                "modified_time": stat.st_mtime,
-            }
-        )
-
-        if len(results) >= max_results:
-            break
-
-    # Sorting
-    if sort_by == "path":
-        results.sort(key=lambda x: x["path"])
-    elif sort_by == "modified":
-        results.sort(key=lambda x: x["modified_time"], reverse=True)
-    elif sort_by == "size":
-        results.sort(key=lambda x: x["size_bytes"], reverse=True)
-
-    return {"pattern": pattern, "count": len(results), "results": results}
-
-
 # -----------------------------
 # Expression evaluation tools
 # -----------------------------
@@ -522,19 +361,12 @@ def install(ctx):
     g_ctx = ctx
     group = "core_tools"
     # Examples of registering tools using automatic definition generation
-    ctx.register_tool(
-    ctx.register_tool(memory_write, group=group)
-    # ctx.register_tool(semantic_search) # TODO: implement
-    ctx.register_tool(read_file, group=group)
-    ctx.register_tool(write_file, group=group)
-    ctx.register_tool(list_directory, group=group)
-    ctx.register_tool(glob_paths, group=group)
+    ctx.register_tool(get_current_time, group=group)
    ctx.register_tool(calc, group=group)
    ctx.register_tool(run_python, group=group)
    ctx.register_tool(run_typescript, group=group)
    ctx.register_tool(run_javascript, group=group)
    ctx.register_tool(run_csharp, group=group)
-    ctx.register_tool(get_current_time, group=group)
 
 def exec_language(language: str, code: str) -> Dict[str, Any]:
     if language == "python":
llms/extensions/github_auth/README.md CHANGED

@@ -16,10 +16,10 @@ Create a config file at `~/.llms/users/default/github_auth/config.json`:
 ```json
 {
   "enabled": true,
-  "client_id": "
-  "client_secret": "
+  "client_id": "GITHUB_CLIENT_ID",
+  "client_secret": "GITHUB_CLIENT_SECRET",
   "redirect_uri": "http://localhost:8000/auth/github/callback",
-  "restrict_to": "
+  "restrict_to": "GITHUB_USERS"
 }
 ```
 
llms/extensions/github_auth/__init__.py CHANGED

@@ -71,7 +71,7 @@ def install(ctx):
 
     # Adding an Auth Provider forces Authentication to be enabled
     auth_provider = GitHubAuthProvider(g_app)
-
+    ctx.add_auth_provider(auth_provider)
 
     # OAuth handlers
     async def github_auth_handler(request):
@@ -244,6 +244,23 @@ def install(ctx):
            return web.json_response(g_app.error_auth_required, status=401)
 
    ctx.add_get("/auth", auth_handler)
+
+    if ctx.debug:
+
+        async def debug_auth_handler(request):
+            return web.json_response(
+                {
+                    "get_session_token": auth_provider.get_session_token(request),
+                    "get_session": ctx.get_session(request),
+                    "get_username": ctx.get_username(request),
+                    "check_auth": ctx.check_auth(request),
+                    # "sessions": list(g_app.sessions.keys()),
+                    # "oauth_states": list(g_app.oauth_states.keys()),
+                }
+            )
+
+        ctx.add_get("/auth/debug", debug_auth_handler)
+
    ctx.add_get("/auth/github", github_auth_handler)
    ctx.add_get("/auth/github/callback", github_callback_handler)
    ctx.add_get("/auth/github/callback{tail:.*}", github_callback_handler)
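When the server runs in debug mode, the new `/auth/debug` route above echoes the session lookup results for the calling request. A rough sketch of inspecting it from a client (the host, port, and cookie name are assumptions for illustration, not documented values):

```python
import json
import urllib.request

# Hypothetical: a local llms server running in debug mode with GitHub auth enabled
req = urllib.request.Request(
    "http://localhost:8000/auth/debug",
    headers={"Cookie": "session=YOUR_SESSION_TOKEN"},  # placeholder session cookie
)
with urllib.request.urlopen(req) as res:
    print(json.dumps(json.load(res), indent=2))
```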
llms/extensions/providers/anthropic.py CHANGED

@@ -174,7 +174,7 @@ def install_anthropic(ctx):
                self.chat_url,
                headers=self.headers,
                data=json.dumps(anthropic_request),
-                timeout=
+                timeout=ctx.get_client_timeout(),
            ) as response:
                return ctx.log_json(
                    self.to_response(await self.response_json(response), chat, started_at, context=context)
llms/extensions/providers/google.py CHANGED

@@ -378,7 +378,7 @@ def install_google(ctx):
                gemini_chat_url,
                headers=self.headers,
                data=json.dumps(gemini_chat),
-                timeout=
+                timeout=ctx.get_client_timeout(),
            ) as res:
                obj = await self.response_json(res)
                if context is not None:
llms/extensions/providers/nvidia.py CHANGED

@@ -96,7 +96,7 @@ def install_nvidia(ctx):
                gen_url,
                headers=headers,
                data=json.dumps(gen_request),
-                timeout=
+                timeout=ctx.get_client_timeout(),
            ) as response:
                return self.to_response(await self.response_json(response), chat, started_at, context=context)
 
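All three provider changes above replace the previous request timeout with `ctx.get_client_timeout()`, which (per the `main.py` changes later in this diff) builds an `aiohttp.ClientTimeout` from the configured `client_timeout` limit. A simplified standalone sketch of the same request pattern (the URL, payload, and 120-second default are placeholders):

```python
import asyncio
import aiohttp

async def post_with_timeout(url: str, payload: dict, client_timeout_seconds: int = 120):
    # Same shape as the provider calls: a per-request aiohttp.ClientTimeout
    timeout = aiohttp.ClientTimeout(total=client_timeout_seconds)
    async with aiohttp.ClientSession() as session:
        async with session.post(url, json=payload, timeout=timeout) as response:
            return await response.json()

# Example (needs a live endpoint):
# asyncio.run(post_with_timeout("https://example.com/api", {"ping": True}))
```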
llms/extensions/skills/__init__.py CHANGED

@@ -9,9 +9,6 @@ import aiohttp
 
 from .parser import read_properties
 
-g_skills = {}
-g_home_skills = None
-
 # Example of what's returned from https://skills.sh/api/skills?limit=5000&offset=0 > ui/data/skills-top-5000.json
 # {
 #     "id": "vercel-react-best-practices",
@@ -47,84 +44,27 @@ def get_skill_files(skill_dir: Path) -> list:
     return files
 
 
-def reload_skill(name: str, location: str, group: str):
-    """Reload a single skill's metadata."""
-    global g_skills
-    skill_dir = Path(location).resolve()
-    if not skill_dir.exists():
-        if name in g_skills:
-            del g_skills[name]
-        return None
-
-    props = read_properties(skill_dir)
-    files = get_skill_files(skill_dir)
-
-    skill_props = props.to_dict()
-    skill_props.update(
-        {
-            "group": group,
-            "location": str(skill_dir),
-            "files": files,
-        }
-    )
-    g_skills[props.name] = skill_props
-    return skill_props
-
-
 def sanitize(name: str) -> str:
     return name.replace(" ", "").replace("_", "").replace("-", "").lower()
 
 
-def
-
-
-
-
-    sanitized_name = sanitize(name)
-    for k, v in g_skills.items():
-        if sanitize(k) == sanitized_name:
-            skill = v
-            break
+def resolve_user_skills_path(ctx, user):
+    if not user:
+        raise ValueError("User is required")
+    user_path = ctx.get_user_path(user)
+    return os.path.join(user_path, "skills")
 
-
-
-
-
-        return
-
-    if file:
-        if file.startswith(location):
-            file = file[len(location) + 1 :]
-        if not os.path.exists(os.path.join(location, file)):
-            return f"Error: File {file} not found in skill {name}. Available files: {', '.join(skill.get('files', []))}"
-        with open(os.path.join(location, file)) as f:
-            return f.read()
-
-    with open(os.path.join(location, "SKILL.md")) as f:
-        content = f.read()
-
-    files = skill.get("files")
-    if files and len(files) > 1:
-        content += "\n\n## Skill Files:\n```\n"
-        for file in files:
-            content += f"{file}\n"
-        content += "```\n"
-    return content
-
-
-def install(ctx):
-    global g_skills, g_home_skills
+def resolve_skills_write_path(ctx, user=None):
+    if user:
+        user_skills_path = resolve_user_skills_path(ctx, user)
+        os.makedirs(user_skills_path, exist_ok=True)
+        return user_skills_path
     home_skills = ctx.get_home_path(os.path.join(".agent", "skills"))
-
-
-    if not os.path.exists(home_skills):
-        os.makedirs(ctx.get_home_path(os.path.join(".agent")), exist_ok=True)
-        ctx.log(f"Creating initial skills folder: {home_skills}")
-        # os.makedirs(home_skills)
-        # copy ui/skills to home_skills
-        ui_skills = os.path.join(ctx.path, "ui", "skills")
-        shutil.copytree(ui_skills, home_skills)
+    os.makedirs(home_skills, exist_ok=True)
+    return home_skills
 
+def resolve_all_skills(ctx, user=None):
+    home_skills = ctx.get_home_path(os.path.join(".agent", "skills"))
    skill_roots = {}
 
    # add .claude skills first, so they can be overridden by .agent skills
@@ -142,7 +82,13 @@ def install(ctx):
        local_skills = str(Path(local_skills).resolve())
        skill_roots[LLMS_LOCAL_SKILLS] = local_skills
 
-
+    user_skills_path = None
+    if user:
+        user_skills_path = resolve_user_skills_path(ctx, user)
+        if os.path.exists(user_skills_path):
+            skill_roots[f"{user}/skills"] = user_skills_path
+
+    ret = {}
    for group, root in skill_roots.items():
        if not os.path.exists(root):
            continue
@@ -164,18 +110,55 @@ def install(ctx):
                rel_path = full_path[len(str(skill_dir)) + 1 :]
                files.append(rel_path)
 
+            writable = False
+            if ctx.is_auth_enabled():
+                writable = user_skills_path and is_safe_path(user_skills_path, skill_dir)
+            else:
+                writable = is_safe_path(home_skills, skill_dir) or is_safe_path(local_skills, skill_dir)
+
            skill_props = props.to_dict()
            skill_props.update(
                {
                    "group": group,
                    "location": str(skill_dir),
                    "files": files,
+                    "writable": bool(writable),
                }
            )
-
+            ret[props.name] = skill_props
 
        except OSError:
            pass
+    return ret
+
+def assert_valid_location(ctx, location, user):
+    if ctx.is_auth_enabled() and not user:
+        raise Exception("Unauthorized")
+
+    # if user is specified, only allow modifications to skills in user directory
+    if user:
+        write_skill_path = resolve_skills_write_path(ctx, user=user)
+        if not is_safe_path(write_skill_path, location):
+            raise Exception("Cannot modify skills outside of allowed user directory")
+        return
+
+    home_skills_path = ctx.get_home_path(os.path.join(".agent", "skills"))
+    local_skills_path = os.path.join(".agent", "skills")
+
+    # Otherwise only allow modifications to skills in home or local .agent directory
+    if not is_safe_path(home_skills_path, location) and not is_safe_path(local_skills_path, location):
+        raise Exception("Cannot modify skills outside of allowed directories")
+
+def install(ctx):
+    home_skills = ctx.get_home_path(os.path.join(".agent", "skills"))
+    # if not folder exists
+    if not os.path.exists(home_skills):
+        os.makedirs(ctx.get_home_path(os.path.join(".agent")), exist_ok=True)
+        ctx.log(f"Creating initial skills folder: {home_skills}")
+        # os.makedirs(home_skills)
+        # copy ui/skills to home_skills
+        ui_skills = os.path.join(ctx.path, "ui", "skills")
+        shutil.copytree(ui_skills, home_skills)
 
    g_available_skills = []
    try:
@@ -186,7 +169,8 @@ def install(ctx):
        pass
 
    async def get_skills(request):
-
+        skills = resolve_all_skills(ctx, user=ctx.get_username(request))
+        return aiohttp.web.json_response(skills)
 
    ctx.add_get("", get_skills)
 
@@ -220,35 +204,22 @@ def install(ctx):
        if not source:
            raise Exception(f"Skill '{id}' has no source repository")
 
+        user = ctx.assert_username(request)
+        write_skill_path = resolve_skills_write_path(ctx, user=user)
+
        # Install from GitHub
        from .installer import install_from_github
 
+        ctx.log(f"Installing skill '{id}' from '{source}' to '{write_skill_path}'")
        result = await install_from_github(
            repo_url=f"https://github.com/{source}.git",
            skill_names=[id],
-            target_dir=
+            target_dir=write_skill_path,
        )
 
        if not result.get("success"):
            raise Exception(result.get("error", "Installation failed"))
 
-        # Reload the installed skills into the registry
-        for installed in result.get("installed", []):
-            skill_path = installed.get("path")
-            if skill_path and os.path.exists(skill_path):
-                skill_dir = Path(skill_path).resolve()
-                props = read_properties(skill_dir)
-                files = get_skill_files(skill_dir)
-                skill_props = props.to_dict()
-                skill_props.update(
-                    {
-                        "group": LLMS_HOME_SKILLS,
-                        "location": str(skill_dir),
-                        "files": files,
-                    }
-                )
-                g_skills[props.name] = skill_props
-
        return aiohttp.web.json_response(result)
 
    ctx.add_post("install/{id}", install_skill)
@@ -256,7 +227,8 @@ def install(ctx):
    async def get_skill(request):
        name = request.match_info.get("name")
        file = request.query.get("file")
-
+        user = ctx.assert_username(request)
+        return aiohttp.web.Response(text=skill(name, file, user=user))
 
    ctx.add_get("contents/{name}", get_skill)
 
@@ -264,8 +236,10 @@ def install(ctx):
        """Get the content of a specific file in a skill."""
        name = request.match_info.get("name")
        file_path = request.match_info.get("path")
+        user = ctx.assert_username(request)
+        skills = resolve_all_skills(ctx, user=user)
 
-        skill_info =
+        skill_info = skills.get(name)
        if not skill_info:
            raise Exception(f"Skill '{name}' not found")
 
@@ -288,7 +262,7 @@ def install(ctx):
    ctx.add_get("file/{name}/{path:.*}", get_file_content)
 
    async def save_file(request):
-        """Save/update a file in a skill. Only works for skills in home directory."""
+        """Save/update a file in a skill. Only works for skills in user home or local directory."""
        name = request.match_info.get("name")
 
        try:
@@ -302,15 +276,15 @@ def install(ctx):
            if not file_path or content is None:
                raise Exception("Missing 'path' or 'content' in request body")
 
-
+            user = ctx.assert_username(request)
+            skills = resolve_all_skills(ctx, user=user)
+            skill_info = skills.get(name)
            if not skill_info:
                raise Exception(f"Skill '{name}' not found")
 
            location = skill_info.get("location")
 
-
-            if not is_safe_path(home_skills, location) and not (local_skills and is_safe_path(local_skills, location)):
-                raise Exception("Cannot modify skills outside of allowed directories")
+            assert_valid_location(ctx, location, user)
 
            full_path = os.path.join(location, file_path)
 
@@ -324,10 +298,10 @@ def install(ctx):
                f.write(content)
 
            # Reload skill metadata
-
-
+            skills = resolve_all_skills(ctx, user=user)
+            skill_info = skills.get(name)
 
-            return aiohttp.web.json_response({"path": file_path, "skill":
+            return aiohttp.web.json_response({"path": file_path, "skill": skill_info})
        except Exception as e:
            raise Exception(str(e)) from e
 
@@ -341,15 +315,14 @@ def install(ctx):
        if not file_path:
            raise Exception("Missing 'path' query parameter")
 
-
+        user = ctx.assert_username(request)
+        skills = resolve_all_skills(ctx, user=user)
+        skill_info = skills.get(name)
        if not skill_info:
            raise Exception(f"Skill '{name}' not found")
 
        location = skill_info.get("location")
-
-        # Only allow modifications to skills in home or local .agent directory
-        if not is_safe_path(home_skills, location) and not (local_skills and is_safe_path(local_skills, location)):
-            raise Exception("Cannot modify skills outside of allowed directories")
+        assert_valid_location(ctx, location, user)
 
        full_path = os.path.join(location, file_path)
 
@@ -376,10 +349,10 @@ def install(ctx):
                break
 
        # Reload skill metadata
-
-
+        skills = resolve_all_skills(ctx, user=user)
+        skill_info = skills.get(name)
 
-        return aiohttp.web.json_response({"path": file_path, "skill":
+        return aiohttp.web.json_response({"path": file_path, "skill": skill_info})
    except Exception as e:
        raise Exception(str(e)) from e
 
@@ -405,7 +378,9 @@ def install(ctx):
        if len(skill_name) > 40:
            raise Exception("Skill name must be 40 characters or less")
 
-
+        user = ctx.assert_username(request)
+        write_skill_path = resolve_skills_write_path(ctx, user=user)
+        skill_dir = os.path.join(write_skill_path, skill_name)
 
        if os.path.exists(skill_dir):
            raise Exception(f"Skill '{skill_name}' already exists")
@@ -419,8 +394,9 @@ def install(ctx):
        try:
            import subprocess
 
+            ctx.log(f"Creating skill '{skill_name}' in '{write_skill_path}'")
            result = subprocess.run(
-                [sys.executable, init_script, skill_name, "--path",
+                [sys.executable, init_script, skill_name, "--path", write_skill_path],
                capture_output=True,
                text=True,
                timeout=30,
@@ -431,21 +407,9 @@ def install(ctx):
 
            # Load the new skill
            if os.path.exists(skill_dir):
-
-
-
-
-                skill_props = props.to_dict()
-                skill_props.update(
-                    {
-                        "group": LLMS_HOME_SKILLS,
-                        "location": str(skill_dir_path),
-                        "files": files,
-                    }
-                )
-                g_skills[props.name] = skill_props
-
-                return aiohttp.web.json_response({"skill": skill_props, "output": result.stdout})
+                skills = resolve_all_skills(ctx, user=user)
+                skill_info = skills.get(skill_name)
+                return aiohttp.web.json_response({"skill": skill_info, "output": result.stdout})
 
            raise Exception("Skill directory not created")
 
@@ -460,27 +424,26 @@ def install(ctx):
        """Delete an entire skill. Only works for skills in home directory."""
        name = request.match_info.get("name")
 
-
+        user = ctx.assert_username(request)
+        skills = resolve_all_skills(ctx, user=user)
+        skill_info = skills.get(name)
 
        if skill_info:
            location = skill_info.get("location")
        else:
-            # Check if orphaned directory exists on disk (not loaded in
+            # Check if orphaned directory exists on disk (not loaded in skills)
            potential_location = os.path.join(home_skills, name)
-            if os.path.exists(potential_location)
+            if os.path.exists(potential_location):
                location = potential_location
            else:
                raise Exception(f"Skill '{name}' not found")
 
-        # Only allow deletion of skills in
-
-            raise Exception("Cannot delete skills outside of allowed directories")
+        # Only allow deletion of skills in allowed directories
+        assert_valid_location(ctx, location, user)
 
        try:
            if os.path.exists(location):
                shutil.rmtree(location)
-                if name in g_skills:
-                    del g_skills[name]
 
            return aiohttp.web.json_response({"deleted": name})
        except Exception as e:
@@ -488,6 +451,42 @@ def install(ctx):
 
    ctx.add_delete("skill/{name}", delete_skill)
 
+    def skill(name: Annotated[str, "skill name"], file: Annotated[str | None, "skill file"] = None, user=None):
+        """Get the content of a skill or a specific file within a skill."""
+        ctx.log(f"skill tool '{name}', file='{file}', user='{user}'")
+
+        skills = resolve_all_skills(ctx, user=user)
+        skill = skills.get(name)
+
+        if not skill:
+            sanitized_name = sanitize(name)
+            for k, v in skills.items():
+                if sanitize(k) == sanitized_name:
+                    skill = v
+                    break
+
+        if not skill:
+            return f"Error: Skill {name} not found. Available skills: {', '.join(skills.keys())}"
+        location = skill.get("location")
+        if not location or not os.path.exists(location):
+            return f"Error: Skill {name} not found at location {location}"
+
+        if file:
+            if file.startswith(location):
+                file = file[len(location) + 1 :]
+            if not os.path.exists(os.path.join(location, file)):
+                return f"Error: File {file} not found in skill {name}. Available files: {', '.join(skill.get('files', []))}"
+            with open(os.path.join(location, file)) as f:
+                return f.read()
+
+        with open(os.path.join(location, "SKILL.md")) as f:
+            content = f.read()
+
+        files = skill.get("files")
+        if files and len(files) > 1:
+            content += "\n\n## Skill Files:\n```\n"
+        return content
+
    ctx.register_tool(skill, group="core_tools")
 
 
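The rewritten `skill` tool resolves names against `resolve_all_skills()` and falls back to a `sanitize()`-normalized comparison when there is no exact match. A small self-contained sketch of that lookup (the `skills` dict below is a made-up registry, not real data):

```python
def sanitize(name: str) -> str:
    # Same normalization used by the skills extension
    return name.replace(" ", "").replace("_", "").replace("-", "").lower()

def find_skill(skills: dict, name: str):
    """Exact match first, then a case/punctuation-insensitive match."""
    skill = skills.get(name)
    if not skill:
        wanted = sanitize(name)
        for k, v in skills.items():
            if sanitize(k) == wanted:
                return v
    return skill

# Hypothetical registry: keys come from each skill's SKILL.md properties
skills = {"create-plan": {"location": "~/.llms/.agent/skills/create-plan", "writable": False}}
print(find_skill(skills, "Create Plan"))  # matches via sanitize()
```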
llms/extensions/skills/ui/index.mjs CHANGED

@@ -3,9 +3,6 @@ import { leftPart } from "@servicestack/client"
 
 let ext
 
-const LLMS_HOME_SKILLS = "~/.llms/.agent/skills"
-const LLMS_LOCAL_SKILLS = ".agent/skills"
-
 const SkillSelector = {
     template: `
     <div class="px-4 py-4 bg-gray-50 dark:bg-gray-800 border-b border-gray-200 dark:border-gray-700 max-h-[80vh] overflow-y-auto">
@@ -122,10 +119,10 @@ const SkillSelector = {
            skills
        }))
 
-        // Sort groups: writable
+        // Sort groups: writable first, then alphabetically
        definedGroups.sort((a, b) => {
-            const aEditable = a.
-            const bEditable = b.
+            const aEditable = a.skills.some(s => s.writable)
+            const bEditable = b.skills.some(s => s.writable)
            if (aEditable !== bEditable) return aEditable ? -1 : 1
            return a.name.localeCompare(b.name)
        })
@@ -392,6 +389,7 @@ const SkillPage = {
        const editorRef = ref(null)
        const expandedSkills = ref({})
        const skills = computed(() => ctx.state.skills || {})
+
        const skillGroups = computed(() => {
            const grouped = {}
            const query = searchQuery.value.toLowerCase()
@@ -402,8 +400,8 @@ const SkillPage = {
                grouped[group].push(skill)
            })
            return Object.entries(grouped).sort((a, b) => {
-                const aEditable = a[
-                const bEditable = b[
+                const aEditable = a[1].some(s => s.writable)
+                const bEditable = b[1].some(s => s.writable)
                if (aEditable !== bEditable) return aEditable ? -1 : 1
                return a[0].localeCompare(b[0])
            }).map(([name, skills]) => ({ name, skills: skills.sort((a, b) => a.name.localeCompare(b.name)) }))
@@ -426,8 +424,8 @@ const SkillPage = {
            return tree.sort((a, b) => { if (a.isFile !== b.isFile) return a.isFile ? 1 : -1; return a.name.localeCompare(b.name) })
        }
        const hasUnsavedChanges = computed(() => isEditing.value && editContent.value !== fileContent.value)
-        function isGroupEditable(groupName) { return
-        function isEditable(skill) { return skill?.
+        function isGroupEditable(groupName) { return Object.values(skills.value).some(s => s.group === groupName && s.writable) }
+        function isEditable(skill) { return skill?.writable }
        function isSkillExpanded(name) { return !!expandedSkills.value[name] }
        function toggleSkillExpand(skill) {
            expandedSkills.value[skill.name] = !expandedSkills.value[skill.name]
@@ -923,14 +921,23 @@ export default {
        },
        show({ thread }) {
            if (thread.messages.length < 2) return false
-
-
+
+            const lastMessage = thread.messages[thread.messages.length - 1]
+            // only show if the last message is from the assistant
+            if (lastMessage.role != "assistant") return false
+
+            // and it has a skill tool call
            const hasSkillToolCall = thread.messages.some(m =>
                m.tool_calls?.some(tc => tc.type == "function" && tc.function.name == "skill"))
+            // or a plan system prompt
            const systemPrompt = thread.messages.find(m => m.role == "system")?.content.toLowerCase() || ''
            const line1 = leftPart(systemPrompt.trim(), "\n")
            const hasPlanSystemPrompt = line1.includes("plan") || systemPrompt.includes("# plan")
-
+
+            // or the last message has no content but has reasoning
+            const hasOnlyThinking = !lastMessage.content?.trim() && lastMessage.reasoning?.trim()
+
+            return hasSkillToolCall || hasPlanSystemPrompt || hasOnlyThinking
        }
    }
 })
llms/llms.json CHANGED

llms/main.py CHANGED
@@ -57,11 +57,15 @@ except ImportError:
     HAS_PIL = False
 
 _ROOT = None
-VERSION = "3.0.25"
+VERSION = "3.0.27"
 DEBUG = os.getenv("DEBUG") == "1"
 MOCK = os.getenv("MOCK") == "1"
 MOCK_DIR = os.getenv("MOCK_DIR")
 DISABLE_EXTENSIONS = (os.getenv("LLMS_DISABLE") or "").split(",")
+DEFAULT_LIMITS = {
+    "client_timeout": 120,
+    "client_max_size": 20971520,
+}
 g_config_path = None
 g_config = None
 g_providers = None
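The new `DEFAULT_LIMITS` dict is merged with an optional `limits` section of the config by `set_config()` later in this file, and `get_client_timeout()` turns the `client_timeout` value into an `aiohttp.ClientTimeout`. A minimal sketch of that merge, assuming a hypothetical user config (not the full llms.json schema):

```python
import aiohttp

DEFAULT_LIMITS = {"client_timeout": 120, "client_max_size": 20971520}

def resolve_limits(config: dict) -> dict:
    # Mirrors set_config(): take config["limits"] if present, then backfill defaults
    limits = dict(config.get("limits", DEFAULT_LIMITS))
    limits.setdefault("client_timeout", 120)
    limits.setdefault("client_max_size", 20971520)
    return limits

config = {"limits": {"client_timeout": 300}}  # hypothetical user config
limits = resolve_limits(config)
timeout = aiohttp.ClientTimeout(total=limits["client_timeout"])
print(limits, timeout.total)  # -> {'client_timeout': 300, 'client_max_size': 20971520} 300
```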
@@ -475,7 +479,7 @@ async def download_file(url):
 
 async def session_download_file(session, url, default_mimetype="application/octet-stream"):
     try:
-        async with session.get(url, timeout=
+        async with session.get(url, timeout=get_client_timeout()) as response:
             response.raise_for_status()
             content = await response.read()
             mimetype = response.headers.get("Content-Type")
@@ -1294,7 +1298,7 @@ class OpenAiCompatible:
        async with aiohttp.ClientSession() as session:
            started_at = time.time()
            async with session.post(
-                self.chat_url, headers=self.headers, data=json.dumps(chat), timeout=
+                self.chat_url, headers=self.headers, data=json.dumps(chat), timeout=get_client_timeout()
            ) as response:
                chat["metadata"] = metadata
                return self.to_response(await response_json(response), chat, started_at, context=context)
@@ -1361,7 +1365,7 @@ class OllamaProvider(OpenAiCompatible):
        async with aiohttp.ClientSession() as session:
            _log(f"GET {self.api}/api/tags")
            async with session.get(
-                f"{self.api}/api/tags", headers=self.headers, timeout=
+                f"{self.api}/api/tags", headers=self.headers, timeout=get_client_timeout()
            ) as response:
                data = await response_json(response)
                for model in data.get("models", []):
@@ -1422,7 +1426,7 @@ class LMStudioProvider(OllamaProvider):
        async with aiohttp.ClientSession() as session:
            _log(f"GET {self.api}/models")
            async with session.get(
-                f"{self.api}/models", headers=self.headers, timeout=
+                f"{self.api}/models", headers=self.headers, timeout=get_client_timeout()
            ) as response:
                data = await response_json(response)
                for model in data.get("data", []):
@@ -1759,6 +1763,15 @@ def convert_tool_args(function_name, function_args):
 
     return function_args
 
+def get_tool_property(function_name, prop_name):
+    tool_def = g_app.get_tool_definition(function_name)
+    if not tool_def:
+        return None
+    if "function" in tool_def and "parameters" in tool_def["function"]:
+        parameters = tool_def.get("function", {}).get("parameters")
+        properties = parameters.get("properties", {})
+        return properties.get(prop_name)
+    return None
 
 async def g_exec_tool(function_name, function_args):
     _log(f"g_exec_tool: {function_name}")
@@ -1913,6 +1926,8 @@ async def g_chat_completion(chat, context=None):
                    except Exception as e:
                        tool_result = f"Error: Failed to parse JSON arguments for tool '{function_name}': {to_error_message(e)}"
                    else:
+                        if "user" in context and get_tool_property(function_name, "user"):
+                            function_args["user"] = context["user"]
                        tool_result, resources = await g_exec_tool(function_name, function_args)
 
                    # Append tool result to history
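With this change, any registered tool that declares a `user` parameter (such as the `skill` tool above) gets the authenticated username injected from the request context before execution. A standalone sketch of the idea (the tool definition follows the OpenAI function-calling shape; the injection helper is a simplified stand-in, not the real `g_exec_tool` pipeline):

```python
def get_tool_property(tool_def: dict, prop_name: str):
    """Simplified: look up a parameter in an OpenAI-style tool definition."""
    params = tool_def.get("function", {}).get("parameters", {})
    return params.get("properties", {}).get(prop_name)

def inject_user(tool_def: dict, function_args: dict, context: dict) -> dict:
    # Mirrors the new behaviour: only inject when the tool declares a "user" parameter
    if "user" in context and get_tool_property(tool_def, "user"):
        function_args = {**function_args, "user": context["user"]}
    return function_args

tool_def = {
    "function": {
        "name": "skill",
        "parameters": {"properties": {"name": {"type": "string"}, "file": {"type": "string"}, "user": {"type": "string"}}},
    }
}
print(inject_user(tool_def, {"name": "create-plan"}, {"user": "alice"}))
# {'name': 'create-plan', 'user': 'alice'}
```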
@@ -2798,6 +2813,9 @@ class AuthProvider:
 
     def get_session(self, request: web.Request) -> Optional[Dict[str, Any]]:
         session_token = self.get_session_token(request)
+        # _dbg(
+        #     f"Session token: {session_token} / {len(self.app.sessions)} sessions = {session_token in self.app.sessions}"
+        # )
 
         if not session_token or session_token not in self.app.sessions:
             return None
@@ -2819,6 +2837,12 @@ class AuthProvider:
        return False, None
 
 
+def get_client_timeout(app=None):
+    app = app or g_app
+    timeout = app.limits.get("client_timeout", 120) if app else 120
+    return aiohttp.ClientTimeout(total=timeout)
+
+
 class AppExtensions:
     """
     APIs extensions can use to extend the app
@@ -2828,6 +2852,7 @@ class AppExtensions:
        self.cli_args = cli_args
        self.extra_args = extra_args
        self.config = None
+        self.limits = DEFAULT_LIMITS
        self.error_auth_required = create_error_response("Authentication required", "Unauthorized")
        self.ui_extensions = []
        self.chat_request_filters = []
@@ -2907,6 +2932,12 @@ class AppExtensions:
 
    def set_config(self, config: Dict[str, Any]):
        self.config = config
+        self.limits = self.config.get("limits", DEFAULT_LIMITS)
+        self.limits["client_timeout"] = self.limits.get("client_timeout", 120)
+        self.limits["client_max_size"] = self.limits.get("client_max_size", 20971520)
+
+    def get_client_timeout(self):
+        return get_client_timeout(self)
 
    def set_allowed_directories(
        self, directories: List[Annotated[str, "List of absolute paths that are allowed to be accessed."]]
@@ -2945,6 +2976,14 @@ class AppExtensions:
            return username
        return None
 
+    def assert_username(self, request: web.Request) -> str:
+        if not self.is_auth_enabled():
+            return None
+        username = self.get_username(request)
+        if not username:
+            raise Exception("Authentication required")
+        return username
+
    def check_auth(self, request: web.Request) -> Tuple[bool, Optional[Dict[str, Any]]]:
        """Check if request is authenticated. Returns (is_authenticated, user_data)"""
        if len(self.auth_providers) == 0:
@@ -3058,6 +3097,7 @@ class ExtensionContext:
    def __init__(self, app: AppExtensions, path: str):
        self.app = app
        self.config = app.config
+        self.limits = app.limits
        self.cli_args = app.cli_args
        self.extra_args = app.extra_args
        self.error_auth_required = app.error_auth_required
@@ -3076,17 +3116,26 @@ class ExtensionContext:
        self.oauth_states = app.oauth_states
        self.disabled = False
 
+    def get_client_timeout(self):
+        return self.app.get_client_timeout()
+
    def add_auth_provider(self, auth_provider: AuthProvider) -> None:
        """Add an authentication provider."""
        self.app.add_auth_provider(auth_provider)
        self.log(f"Added Auth Provider: {auth_provider.__class__.__name__}, Authentication is now enabled")
 
+    def is_auth_enabled(self) -> bool:
+        return self.app.is_auth_enabled()
+
    def get_session(self, request: web.Request) -> Optional[Dict[str, Any]]:
        return self.app.get_session(request)
 
    def get_username(self, request: web.Request) -> Optional[str]:
        return self.app.get_username(request)
 
+    def assert_username(self, request: web.Request) -> Optional[str]:
+        return self.app.assert_username(request)
+
    def check_auth(self, request: web.Request) -> Tuple[bool, Optional[Dict[str, Any]]]:
        return self.app.check_auth(request)
 
llms/ui/ai.mjs CHANGED

{llms_py-3.0.25.dist-info → llms_py-3.0.27.dist-info}/METADATA CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: llms-py
-Version: 3.0.25
+Version: 3.0.27
 Summary: A lightweight CLI tool and OpenAI-compatible server for querying multiple Large Language Model (LLM) providers
 Home-page: https://github.com/ServiceStack/llms
 Author: ServiceStack
{llms_py-3.0.25.dist-info → llms_py-3.0.27.dist-info}/RECORD CHANGED

@@ -2,8 +2,8 @@ llms/__init__.py,sha256=DKwTZDsyYL_wHe7yvLw49Nf8PSgPSyWaeVdotUqSvrQ,84
 llms/__main__.py,sha256=hrBulHIt3lmPm1BCyAEVtB6DQ0Hvc3gnIddhHCmJasg,151
 llms/db.py,sha256=oozp5I5lECVO8oZEFwcZl3ES5mARqWeR1BkoqG5kSqM,11687
 llms/index.html,sha256=nGk1Djtn9p7l6LuKp4Kg0JIB9fCzxtTWXFfmDb4ggpc,1658
-llms/llms.json,sha256=
-llms/main.py,sha256=
+llms/llms.json,sha256=ar7f5uti80RrYhbsYLtCuqdHSFWWlwyVeV0nPCgUzeA,11564
+llms/main.py,sha256=JETnTf2TkJd14WTM6nkfiaqH6VZ72JnaXY_ySWb2uZY,176657
 llms/providers-extra.json,sha256=_6DmGBiQY9LM6_Y0zOiObYn7ba4g3akSNQfmHcYlENc,11101
 llms/providers.json,sha256=yls3OUqPIBLSf2rk0xgwUHKkvd-8drGq4JW7w49rEws,299324
 llms/extensions/analytics/ui/index.mjs,sha256=m1XwaqYCLwK267JAUCAltkN_nOXep0GxfpvGNS5i4_w,69547
@@ -19,11 +19,11 @@ llms/extensions/computer/base.py,sha256=Igio5R6kPQOxIbmpaA7X6j6eC4cpF3jwTTR8rURf
 llms/extensions/computer/bash.py,sha256=-xo67wVAdrqxtXgR7MK-iAkJ4Wne7Dm1JmnuHC2xW8o,5953
 llms/extensions/computer/computer.py,sha256=wehwcrYwi9usCRcziE_loMhWDbVgfjLk_T4_4TZa4W4,19642
 llms/extensions/computer/edit.py,sha256=QluhvRhYSSQJfbih4QyfC4M8W8aVqiOApfYXZgZTI5M,12725
-llms/extensions/computer/filesystem.py,sha256=
+llms/extensions/computer/filesystem.py,sha256=V6UI2rtGxreQyPvYnXfeQQuLocTDloAQU1k_jCTybnY,21825
 llms/extensions/computer/platform.py,sha256=w5ECar8lM4Lag7rTYUQmU7wEWaqCeejNXwwM3CB8ulQ,14866
 llms/extensions/computer/run.py,sha256=ZIcoYyy2cc3IKR_T4yJgx6IUHu2m7UusIJi9Dx1s7dA,1566
 llms/extensions/core_tools/CALCULATOR.md,sha256=pJRtCVF01BgxFrSNh2Ys_lrRi3SFwLgJzAX93AGh93Q,1944
-llms/extensions/core_tools/__init__.py,sha256=
+llms/extensions/core_tools/__init__.py,sha256=YazB9yM5mLEtZWmCb_Dguz9XU2aPw4O6QU2Jk0vWI44,16422
 llms/extensions/core_tools/ui/index.mjs,sha256=KycJ2FcQ6BieBY7fjWGxVBGHN6WuFx712OFrO6flXww,31770
 llms/extensions/core_tools/ui/codemirror/codemirror.css,sha256=60lOqXLSZh74b39qxlbdZ4bXIeScnBtG4euWfktvm_M,8720
 llms/extensions/core_tools/ui/codemirror/codemirror.js,sha256=7cA89SlK249o7tVfiEWIiqDEA6ZEWxX4CoZmofVA14s,402008
@@ -49,8 +49,8 @@ llms/extensions/gallery/README.md,sha256=zif27qiMef1dBboMEPvTJqPDnLvrqntraVSw8oQ
 llms/extensions/gallery/__init__.py,sha256=24cABEIyqD0zLi32Jrqxoe-qdsS_q4mdJK7NgS0SiEY,1756
 llms/extensions/gallery/db.py,sha256=IgiwBnqdGf0GYWCltphLD0NWS_MnbPDpRUJszh2_DW0,9032
 llms/extensions/gallery/ui/index.mjs,sha256=2a6dsZqCoey3So3QLCaHMqYSdzOcLR7ZBi60auFx-24,28356
-llms/extensions/github_auth/README.md,sha256=
-llms/extensions/github_auth/__init__.py,sha256=
+llms/extensions/github_auth/README.md,sha256=LeACo4FFceOhmDXdN4ioA7Pm1oxvlQltC09B6lXYRIY,6797
+llms/extensions/github_auth/__init__.py,sha256=9_Y1bKMVZH2X-QAmFkpl3cqwHaA06FH4oFDzOET4EIE,10532
 llms/extensions/github_auth/ui/index.mjs,sha256=TR-FJh_g475DcLUzsyAbKbsnTNJoQy6XHNJl5EVG2T4,3544
 llms/extensions/katex/README.md,sha256=fD_0riNXrKW0SfehifIc3jnXKg9diQdlXVq5X_e1zTc,1473
 llms/extensions/katex/__init__.py,sha256=CvEe9CPtzoANc4AFuxZTbrJZdggoWLuKnENKfEdncPQ,206
@@ -140,23 +140,23 @@ llms/extensions/katex/ui/fonts/KaTeX_Typewriter-Regular.ttf,sha256=8B8-h9nGphwMC
 llms/extensions/katex/ui/fonts/KaTeX_Typewriter-Regular.woff,sha256=4U_tArGrp86fWv1YRLXQMhsiNR_rxyDg3ouHI1J2Cfc,16028
 llms/extensions/katex/ui/fonts/KaTeX_Typewriter-Regular.woff2,sha256=cdUX1ngneHz6vfGGkUzDNY7aU543kxlB8rL9SiH2jAs,13568
 llms/extensions/providers/__init__.py,sha256=C5zOBQEOB2L96rAZdjV42fPVk_dZxSh2Dv30Kb1w3lE,534
-llms/extensions/providers/anthropic.py,sha256=
+llms/extensions/providers/anthropic.py,sha256=T7ob4Sqb94k97d96TaJO2IvH53iMycvCxyW9lagO5mQ,12003
 llms/extensions/providers/cerebras.py,sha256=iKPzsaRIBqQWXsgPQ50jsNPGx8Ud704VVUAXzDBHu7k,1368
 llms/extensions/providers/chutes.py,sha256=5ZrfbqoOhgzKLQy_qULcp4jlvW5WXPR0jP9kN2Jzb9g,6229
-llms/extensions/providers/google.py,sha256=
-llms/extensions/providers/nvidia.py,sha256=
+llms/extensions/providers/google.py,sha256=TAFS8WMQqBARrNlJhvMP1oUHQ-oLWZ4jwYLxEqwKyPI,27711
+llms/extensions/providers/nvidia.py,sha256=uMyTwo2OZhikKzm8olp-321emeWeZCXIUty2cAWjkGU,4271
 llms/extensions/providers/openai.py,sha256=n1jTXkWPrboTazCW0FX3zYYaOlsW44rQpIfgKBINyCQ,6382
 llms/extensions/providers/openrouter.py,sha256=9V1NSZJblku_lfsDY8Fantr5QFzkp3A0hXFDCYucShM,3344
 llms/extensions/providers/zai.py,sha256=WPJlDxmnjuhxlgWhUXw2MYqgEKYfy14wVQbpQyMMlbU,7736
 llms/extensions/skills/LICENSE,sha256=WNHhf_5RCaeuKWyq_K39vmp9F28LxKsB4SpomwSZ2L0,11357
 llms/extensions/skills/README.md,sha256=wawmpeyiJFw9G8qzCZ3CiKamt47k6fO680gFX9wuOGs,10471
-llms/extensions/skills/__init__.py,sha256=
+llms/extensions/skills/__init__.py,sha256=exzBlxUjOKdI8NvjzZFigzQYpcLWtNqTT5prHobNmiQ,17792
 llms/extensions/skills/errors.py,sha256=V4DTFNtzVADDlZ0g7RmoxZRFeG01oaG3zzaPAVdtkfQ,572
 llms/extensions/skills/installer.py,sha256=GfNYRK6LbYSZtIOesQ_fQvjtBsiZC9sTXLVuUbrrdLI,12751
 llms/extensions/skills/models.py,sha256=xmRfz8BMeOdzZXhW6MYFjkOVHOKD6bMDynId8aysans,1461
 llms/extensions/skills/parser.py,sha256=Mb4NOtoY3Ip4Nng8ixb26oE8U_Sp5CI3n3l_qxbg1UM,5500
 llms/extensions/skills/validator.py,sha256=te49hTfIPJWcMcLLCApSJ2Ru3lrqVF8ayDXtPEZF9sU,5154
-llms/extensions/skills/ui/index.mjs,sha256=
+llms/extensions/skills/ui/index.mjs,sha256=YYVaa4qWiLrcj9USnQWHz6NVblXzahV7R9SVJ-irUPc,63587
 llms/extensions/skills/ui/data/skills-top-5000.json,sha256=e8fUF_NQSUZRK0rGRUqBFecW5JEX6QO6H_P3WqTBBQ4,540056
 llms/extensions/skills/ui/skills/create-plan/SKILL.md,sha256=ZAtiM2qPHcc8Z3Ongl1NgX5ythITPwyvcIqisgqWrGA,2493
 llms/extensions/skills/ui/skills/skill-creator/LICENSE.txt,sha256=WNHhf_5RCaeuKWyq_K39vmp9F28LxKsB4SpomwSZ2L0,11357
@@ -173,7 +173,7 @@ llms/extensions/system_prompts/ui/prompts.json,sha256=t5DD3bird-87wFa4OlW-bC2wdo
 llms/extensions/tools/__init__.py,sha256=PRZe0QMfsOymJ3jTqO0VFppNEWI4f2bYSOImK_YrGQM,2036
 llms/extensions/tools/ui/index.mjs,sha256=1TgCn74oX_rUAhxO8w54HlIgNkHnI5ma-GCqXp-qYVY,39434
 llms/ui/App.mjs,sha256=8yljf7M7LUp4q7XPEHTCUKpJB3X2d8ePnatRamwTM00,7622
-llms/ui/ai.mjs,sha256=
+llms/ui/ai.mjs,sha256=s3DYVkugdmqbkjePM4-koPoy7kPIFyAqGU8M__K2sI8,6540
 llms/ui/app.css,sha256=SVVzmFhTd0chuGq5yhv3FjgNudgo6WXlM2fnb-csK4c,190220
 llms/ui/ctx.mjs,sha256=4x-LTmofhf6OvLThSlDSTQOsLkzyBFOEMRGIOLHszqs,14974
 llms/ui/fav.svg,sha256=_R6MFeXl6wBFT0lqcUxYQIDWgm246YH_3hSTW0oO8qw,734
@@ -199,9 +199,9 @@ llms/ui/modules/model-selector.mjs,sha256=6U4rAZ7vmQELFRQGWk4YEtq02v3lyHdMq6yUOp
 llms/ui/modules/chat/ChatBody.mjs,sha256=OyjAQPHNIbdqEhQq01ysbx7Cbt1CezUERbgFpcbnrNI,58081
 llms/ui/modules/chat/SettingsDialog.mjs,sha256=HMBJTwrapKrRIAstIIqp0QlJL5O-ho4hzgvfagPfsX8,19930
 llms/ui/modules/chat/index.mjs,sha256=nS_L6G1RSuCybgnA6n-q8Sn3OeSbQWL2iW3-zCIFqJk,39548
-llms_py-3.0.
-llms_py-3.0.
-llms_py-3.0.
-llms_py-3.0.
-llms_py-3.0.
-llms_py-3.0.
+llms_py-3.0.27.dist-info/licenses/LICENSE,sha256=bus9cuAOWeYqBk2OuhSABVV1P4z7hgrEFISpyda_H5w,1532
+llms_py-3.0.27.dist-info/METADATA,sha256=Ojx7FD0AjI3enkul3b9wiEqEVnDf0Yo_QN-3uynyN4c,2195
+llms_py-3.0.27.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
+llms_py-3.0.27.dist-info/entry_points.txt,sha256=WswyE7PfnkZMIxboC-MS6flBD6wm-CYU7JSUnMhqMfM,40
+llms_py-3.0.27.dist-info/top_level.txt,sha256=gC7hk9BKSeog8gyg-EM_g2gxm1mKHwFRfK-10BxOsa4,5
+llms_py-3.0.27.dist-info/RECORD,,

{llms_py-3.0.25.dist-info → llms_py-3.0.27.dist-info}/WHEEL: File without changes
{llms_py-3.0.25.dist-info → llms_py-3.0.27.dist-info}/entry_points.txt: File without changes
{llms_py-3.0.25.dist-info → llms_py-3.0.27.dist-info}/licenses/LICENSE: File without changes
{llms_py-3.0.25.dist-info → llms_py-3.0.27.dist-info}/top_level.txt: File without changes