npcsh 1.1.14__py3-none-any.whl → 1.1.16__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- npcsh/_state.py +533 -80
- npcsh/mcp_server.py +2 -1
- npcsh/npc.py +84 -32
- npcsh/npc_team/alicanto.npc +22 -1
- npcsh/npc_team/corca.npc +28 -9
- npcsh/npc_team/frederic.npc +25 -4
- npcsh/npc_team/guac.npc +22 -0
- npcsh/npc_team/jinxs/bin/nql.jinx +141 -0
- npcsh/npc_team/jinxs/bin/sync.jinx +230 -0
- {npcsh-1.1.14.data/data/npcsh/npc_team → npcsh/npc_team/jinxs/bin}/vixynt.jinx +8 -30
- npcsh/npc_team/jinxs/bin/wander.jinx +152 -0
- npcsh/npc_team/jinxs/lib/browser/browser_action.jinx +220 -0
- npcsh/npc_team/jinxs/lib/browser/browser_screenshot.jinx +40 -0
- npcsh/npc_team/jinxs/lib/browser/close_browser.jinx +14 -0
- npcsh/npc_team/jinxs/lib/browser/open_browser.jinx +43 -0
- npcsh/npc_team/jinxs/lib/computer_use/click.jinx +23 -0
- npcsh/npc_team/jinxs/lib/computer_use/key_press.jinx +26 -0
- npcsh/npc_team/jinxs/lib/computer_use/launch_app.jinx +37 -0
- npcsh/npc_team/jinxs/lib/computer_use/screenshot.jinx +23 -0
- npcsh/npc_team/jinxs/lib/computer_use/type_text.jinx +27 -0
- npcsh/npc_team/jinxs/lib/computer_use/wait.jinx +21 -0
- {npcsh-1.1.14.data/data/npcsh/npc_team → npcsh/npc_team/jinxs/lib/core}/edit_file.jinx +3 -3
- {npcsh-1.1.14.data/data/npcsh/npc_team → npcsh/npc_team/jinxs/lib/core}/load_file.jinx +1 -1
- npcsh/npc_team/jinxs/lib/core/paste.jinx +134 -0
- {npcsh-1.1.14.data/data/npcsh/npc_team → npcsh/npc_team/jinxs/lib/core}/search.jinx +2 -1
- npcsh/npc_team/jinxs/{code → lib/core}/sh.jinx +2 -8
- npcsh/npc_team/jinxs/{code → lib/core}/sql.jinx +1 -1
- npcsh/npc_team/jinxs/lib/orchestration/convene.jinx +232 -0
- npcsh/npc_team/jinxs/lib/orchestration/delegate.jinx +184 -0
- npcsh/npc_team/jinxs/lib/research/arxiv.jinx +76 -0
- npcsh/npc_team/jinxs/lib/research/paper_search.jinx +101 -0
- npcsh/npc_team/jinxs/lib/research/semantic_scholar.jinx +69 -0
- npcsh/npc_team/jinxs/{utils/core → lib/utils}/build.jinx +8 -8
- npcsh/npc_team/jinxs/lib/utils/jinxs.jinx +176 -0
- npcsh/npc_team/jinxs/lib/utils/shh.jinx +17 -0
- npcsh/npc_team/jinxs/lib/utils/switch.jinx +62 -0
- npcsh/npc_team/jinxs/lib/utils/switches.jinx +61 -0
- npcsh/npc_team/jinxs/lib/utils/teamviz.jinx +205 -0
- npcsh/npc_team/jinxs/lib/utils/verbose.jinx +17 -0
- npcsh/npc_team/kadiefa.npc +19 -1
- npcsh/npc_team/plonk.npc +26 -1
- npcsh/npc_team/plonkjr.npc +22 -1
- npcsh/npc_team/sibiji.npc +23 -2
- npcsh/npcsh.py +153 -39
- npcsh/ui.py +22 -1
- npcsh-1.1.16.data/data/npcsh/npc_team/alicanto.npc +23 -0
- npcsh-1.1.16.data/data/npcsh/npc_team/arxiv.jinx +76 -0
- npcsh-1.1.16.data/data/npcsh/npc_team/browser_action.jinx +220 -0
- npcsh-1.1.16.data/data/npcsh/npc_team/browser_screenshot.jinx +40 -0
- {npcsh-1.1.14.data → npcsh-1.1.16.data}/data/npcsh/npc_team/build.jinx +8 -8
- npcsh-1.1.16.data/data/npcsh/npc_team/click.jinx +23 -0
- npcsh-1.1.16.data/data/npcsh/npc_team/close_browser.jinx +14 -0
- npcsh-1.1.16.data/data/npcsh/npc_team/convene.jinx +232 -0
- npcsh-1.1.16.data/data/npcsh/npc_team/corca.npc +31 -0
- npcsh-1.1.16.data/data/npcsh/npc_team/delegate.jinx +184 -0
- {npcsh/npc_team/jinxs/utils → npcsh-1.1.16.data/data/npcsh/npc_team}/edit_file.jinx +3 -3
- npcsh-1.1.16.data/data/npcsh/npc_team/frederic.npc +27 -0
- npcsh-1.1.16.data/data/npcsh/npc_team/guac.npc +22 -0
- npcsh-1.1.16.data/data/npcsh/npc_team/jinxs.jinx +176 -0
- npcsh-1.1.16.data/data/npcsh/npc_team/kadiefa.npc +21 -0
- npcsh-1.1.16.data/data/npcsh/npc_team/key_press.jinx +26 -0
- npcsh-1.1.16.data/data/npcsh/npc_team/launch_app.jinx +37 -0
- {npcsh/npc_team/jinxs/utils → npcsh-1.1.16.data/data/npcsh/npc_team}/load_file.jinx +1 -1
- npcsh-1.1.16.data/data/npcsh/npc_team/nql.jinx +141 -0
- npcsh-1.1.16.data/data/npcsh/npc_team/open_browser.jinx +43 -0
- npcsh-1.1.16.data/data/npcsh/npc_team/paper_search.jinx +101 -0
- npcsh-1.1.16.data/data/npcsh/npc_team/paste.jinx +134 -0
- npcsh-1.1.16.data/data/npcsh/npc_team/plonk.npc +27 -0
- npcsh-1.1.16.data/data/npcsh/npc_team/plonkjr.npc +23 -0
- npcsh-1.1.16.data/data/npcsh/npc_team/screenshot.jinx +23 -0
- {npcsh/npc_team/jinxs/utils → npcsh-1.1.16.data/data/npcsh/npc_team}/search.jinx +2 -1
- npcsh-1.1.16.data/data/npcsh/npc_team/semantic_scholar.jinx +69 -0
- {npcsh-1.1.14.data → npcsh-1.1.16.data}/data/npcsh/npc_team/sh.jinx +2 -8
- npcsh-1.1.16.data/data/npcsh/npc_team/shh.jinx +17 -0
- npcsh-1.1.16.data/data/npcsh/npc_team/sibiji.npc +24 -0
- {npcsh-1.1.14.data → npcsh-1.1.16.data}/data/npcsh/npc_team/sql.jinx +1 -1
- npcsh-1.1.16.data/data/npcsh/npc_team/switch.jinx +62 -0
- npcsh-1.1.16.data/data/npcsh/npc_team/switches.jinx +61 -0
- npcsh-1.1.16.data/data/npcsh/npc_team/sync.jinx +230 -0
- npcsh-1.1.16.data/data/npcsh/npc_team/teamviz.jinx +205 -0
- npcsh-1.1.16.data/data/npcsh/npc_team/type_text.jinx +27 -0
- npcsh-1.1.16.data/data/npcsh/npc_team/verbose.jinx +17 -0
- {npcsh/npc_team/jinxs/utils → npcsh-1.1.16.data/data/npcsh/npc_team}/vixynt.jinx +8 -30
- npcsh-1.1.16.data/data/npcsh/npc_team/wait.jinx +21 -0
- npcsh-1.1.16.data/data/npcsh/npc_team/wander.jinx +152 -0
- {npcsh-1.1.14.dist-info → npcsh-1.1.16.dist-info}/METADATA +399 -58
- npcsh-1.1.16.dist-info/RECORD +170 -0
- npcsh-1.1.16.dist-info/entry_points.txt +19 -0
- npcsh-1.1.16.dist-info/top_level.txt +2 -0
- project/__init__.py +1 -0
- npcsh/npc_team/foreman.npc +0 -7
- npcsh/npc_team/jinxs/modes/alicanto.jinx +0 -194
- npcsh/npc_team/jinxs/modes/corca.jinx +0 -249
- npcsh/npc_team/jinxs/modes/guac.jinx +0 -317
- npcsh/npc_team/jinxs/modes/plonk.jinx +0 -214
- npcsh/npc_team/jinxs/modes/pti.jinx +0 -170
- npcsh/npc_team/jinxs/modes/wander.jinx +0 -186
- npcsh/npc_team/jinxs/utils/agent.jinx +0 -17
- npcsh/npc_team/jinxs/utils/core/jinxs.jinx +0 -32
- npcsh-1.1.14.data/data/npcsh/npc_team/agent.jinx +0 -17
- npcsh-1.1.14.data/data/npcsh/npc_team/alicanto.jinx +0 -194
- npcsh-1.1.14.data/data/npcsh/npc_team/alicanto.npc +0 -2
- npcsh-1.1.14.data/data/npcsh/npc_team/corca.jinx +0 -249
- npcsh-1.1.14.data/data/npcsh/npc_team/corca.npc +0 -12
- npcsh-1.1.14.data/data/npcsh/npc_team/foreman.npc +0 -7
- npcsh-1.1.14.data/data/npcsh/npc_team/frederic.npc +0 -6
- npcsh-1.1.14.data/data/npcsh/npc_team/guac.jinx +0 -317
- npcsh-1.1.14.data/data/npcsh/npc_team/jinxs.jinx +0 -32
- npcsh-1.1.14.data/data/npcsh/npc_team/kadiefa.npc +0 -3
- npcsh-1.1.14.data/data/npcsh/npc_team/plonk.jinx +0 -214
- npcsh-1.1.14.data/data/npcsh/npc_team/plonk.npc +0 -2
- npcsh-1.1.14.data/data/npcsh/npc_team/plonkjr.npc +0 -2
- npcsh-1.1.14.data/data/npcsh/npc_team/pti.jinx +0 -170
- npcsh-1.1.14.data/data/npcsh/npc_team/sibiji.npc +0 -3
- npcsh-1.1.14.data/data/npcsh/npc_team/wander.jinx +0 -186
- npcsh-1.1.14.dist-info/RECORD +0 -135
- npcsh-1.1.14.dist-info/entry_points.txt +0 -9
- npcsh-1.1.14.dist-info/top_level.txt +0 -1
- /npcsh/npc_team/jinxs/{utils → bin}/roll.jinx +0 -0
- /npcsh/npc_team/jinxs/{utils → bin}/sample.jinx +0 -0
- /npcsh/npc_team/jinxs/{modes → bin}/spool.jinx +0 -0
- /npcsh/npc_team/jinxs/{modes → bin}/yap.jinx +0 -0
- /npcsh/npc_team/jinxs/{utils → lib/computer_use}/trigger.jinx +0 -0
- /npcsh/npc_team/jinxs/{utils → lib/core}/chat.jinx +0 -0
- /npcsh/npc_team/jinxs/{utils → lib/core}/cmd.jinx +0 -0
- /npcsh/npc_team/jinxs/{utils → lib/core}/compress.jinx +0 -0
- /npcsh/npc_team/jinxs/{utils → lib/core}/ots.jinx +0 -0
- /npcsh/npc_team/jinxs/{code → lib/core}/python.jinx +0 -0
- /npcsh/npc_team/jinxs/{utils → lib/core}/sleep.jinx +0 -0
- /npcsh/npc_team/jinxs/{utils/core → lib/utils}/compile.jinx +0 -0
- /npcsh/npc_team/jinxs/{utils/core → lib/utils}/help.jinx +0 -0
- /npcsh/npc_team/jinxs/{utils/core → lib/utils}/init.jinx +0 -0
- /npcsh/npc_team/jinxs/{utils → lib/utils}/serve.jinx +0 -0
- /npcsh/npc_team/jinxs/{utils/core → lib/utils}/set.jinx +0 -0
- /npcsh/npc_team/jinxs/{utils → lib/utils}/usage.jinx +0 -0
- {npcsh-1.1.14.data → npcsh-1.1.16.data}/data/npcsh/npc_team/alicanto.png +0 -0
- {npcsh-1.1.14.data → npcsh-1.1.16.data}/data/npcsh/npc_team/chat.jinx +0 -0
- {npcsh-1.1.14.data → npcsh-1.1.16.data}/data/npcsh/npc_team/cmd.jinx +0 -0
- {npcsh-1.1.14.data → npcsh-1.1.16.data}/data/npcsh/npc_team/compile.jinx +0 -0
- {npcsh-1.1.14.data → npcsh-1.1.16.data}/data/npcsh/npc_team/compress.jinx +0 -0
- {npcsh-1.1.14.data → npcsh-1.1.16.data}/data/npcsh/npc_team/corca.png +0 -0
- {npcsh-1.1.14.data → npcsh-1.1.16.data}/data/npcsh/npc_team/corca_example.png +0 -0
- {npcsh-1.1.14.data → npcsh-1.1.16.data}/data/npcsh/npc_team/frederic4.png +0 -0
- {npcsh-1.1.14.data → npcsh-1.1.16.data}/data/npcsh/npc_team/guac.png +0 -0
- {npcsh-1.1.14.data → npcsh-1.1.16.data}/data/npcsh/npc_team/help.jinx +0 -0
- {npcsh-1.1.14.data → npcsh-1.1.16.data}/data/npcsh/npc_team/init.jinx +0 -0
- {npcsh-1.1.14.data → npcsh-1.1.16.data}/data/npcsh/npc_team/kadiefa.png +0 -0
- {npcsh-1.1.14.data → npcsh-1.1.16.data}/data/npcsh/npc_team/npc-studio.jinx +0 -0
- {npcsh-1.1.14.data → npcsh-1.1.16.data}/data/npcsh/npc_team/npcsh.ctx +0 -0
- {npcsh-1.1.14.data → npcsh-1.1.16.data}/data/npcsh/npc_team/npcsh_sibiji.png +0 -0
- {npcsh-1.1.14.data → npcsh-1.1.16.data}/data/npcsh/npc_team/ots.jinx +0 -0
- {npcsh-1.1.14.data → npcsh-1.1.16.data}/data/npcsh/npc_team/plonk.png +0 -0
- {npcsh-1.1.14.data → npcsh-1.1.16.data}/data/npcsh/npc_team/plonkjr.png +0 -0
- {npcsh-1.1.14.data → npcsh-1.1.16.data}/data/npcsh/npc_team/python.jinx +0 -0
- {npcsh-1.1.14.data → npcsh-1.1.16.data}/data/npcsh/npc_team/roll.jinx +0 -0
- {npcsh-1.1.14.data → npcsh-1.1.16.data}/data/npcsh/npc_team/sample.jinx +0 -0
- {npcsh-1.1.14.data → npcsh-1.1.16.data}/data/npcsh/npc_team/serve.jinx +0 -0
- {npcsh-1.1.14.data → npcsh-1.1.16.data}/data/npcsh/npc_team/set.jinx +0 -0
- {npcsh-1.1.14.data → npcsh-1.1.16.data}/data/npcsh/npc_team/sibiji.png +0 -0
- {npcsh-1.1.14.data → npcsh-1.1.16.data}/data/npcsh/npc_team/sleep.jinx +0 -0
- {npcsh-1.1.14.data → npcsh-1.1.16.data}/data/npcsh/npc_team/spool.jinx +0 -0
- {npcsh-1.1.14.data → npcsh-1.1.16.data}/data/npcsh/npc_team/spool.png +0 -0
- {npcsh-1.1.14.data → npcsh-1.1.16.data}/data/npcsh/npc_team/trigger.jinx +0 -0
- {npcsh-1.1.14.data → npcsh-1.1.16.data}/data/npcsh/npc_team/usage.jinx +0 -0
- {npcsh-1.1.14.data → npcsh-1.1.16.data}/data/npcsh/npc_team/yap.jinx +0 -0
- {npcsh-1.1.14.data → npcsh-1.1.16.data}/data/npcsh/npc_team/yap.png +0 -0
- {npcsh-1.1.14.dist-info → npcsh-1.1.16.dist-info}/WHEEL +0 -0
- {npcsh-1.1.14.dist-info → npcsh-1.1.16.dist-info}/licenses/LICENSE +0 -0
|
jinx_name: arxiv
description: Search arXiv for preprints and papers
inputs:
  - query: ""
  - limit: 10
steps:
  - name: search_arxiv
    engine: python
    code: |
      # Query the arXiv Atom API and format the top matches.
      # Reads: context['query'], context['limit'].
      # Writes: context['output'] (human-readable list), context['papers'] (structured dicts).
      import urllib.request
      import urllib.parse
      import xml.etree.ElementTree as ET

      query = context.get('query', '')
      limit = int(context.get('limit', 10))

      if not query:
          context['output'] = "Usage: /arxiv <query> [--limit N]"
          exit()

      base_url = "http://export.arxiv.org/api/query"
      params = {
          "search_query": f"all:{query}",
          "start": 0,
          "max_results": limit,
          "sortBy": "relevance",
          "sortOrder": "descending"
      }

      url = f"{base_url}?{urllib.parse.urlencode(params)}"

      try:
          with urllib.request.urlopen(url, timeout=30) as response:
              data = response.read().decode('utf-8')

          root = ET.fromstring(data)
          ns = {'atom': 'http://www.w3.org/2005/Atom'}

          entries = root.findall('atom:entry', ns)

          if not entries:
              context['output'] = f"No papers found for: {query}"
              exit()

          results = []
          papers = []
          for i, entry in enumerate(entries, 1):
              # findtext with a default is None-safe: a missing Atom field
              # yields '' instead of raising AttributeError on .text.
              title = (entry.findtext('atom:title', '', ns) or '').strip().replace('\n', ' ')
              abstract_full = (entry.findtext('atom:summary', '', ns) or '').strip()
              # Only append an ellipsis when the abstract was actually truncated.
              summary = abstract_full[:300] + ('...' if len(abstract_full) > 300 else '')
              published = (entry.findtext('atom:published', '', ns) or '')[:10]
              authors = [a.findtext('atom:name', '', ns) for a in entry.findall('atom:author', ns)]
              author_str = ', '.join(authors[:3])
              if len(authors) > 3:
                  author_str += ' et al.'
              link = entry.findtext('atom:id', '', ns)

              results.append(f"{i}. {title}")
              results.append(f"   Authors: {author_str}")
              results.append(f"   Published: {published}")
              results.append(f"   Abstract: {summary}")
              results.append(f"   URL: {link}")
              results.append("")

              papers.append({
                  'title': title,
                  'authors': authors,
                  'abstract': abstract_full,
                  'published': published,
                  'url': link
              })

          context['output'] = f"Found {len(entries)} papers on arXiv:\n\n" + "\n".join(results)
          context['papers'] = papers

      except Exception as e:
          context['output'] = f"arXiv search error: {e}"
jinx_name: paper_search
description: Search for academic papers across multiple sources (Semantic Scholar, arXiv, local datasets)
inputs:
  - query: ""
  - limit: 10
  - source: "all"
steps:
  - name: search_papers
    engine: python
    code: |
      # Aggregate paper search: Semantic Scholar (if S2_API_KEY is set) and arXiv.
      # Reads: context['query'], context['limit'], context['source'].
      # Writes: context['output'] (formatted list), context['papers'] (dicts).
      import os
      import requests
      import urllib.request
      import urllib.parse
      import xml.etree.ElementTree as ET

      query = context.get('query', '')
      limit = int(context.get('limit', 10))
      source = context.get('source', 'all').lower()

      if not query:
          context['output'] = """Usage: /paper_search <query> [--limit N] [--source SOURCE]

      Sources:
        all   - Search all available sources (default)
        s2    - Semantic Scholar only (requires S2_API_KEY)
        arxiv - arXiv only
      """
          exit()

      all_results = []

      # Semantic Scholar (best-effort: skipped silently when no API key).
      if source in ['all', 's2']:
          api_key = os.environ.get('S2_API_KEY')
          if api_key:
              try:
                  url = "https://api.semanticscholar.org/graph/v1/paper/search"
                  headers = {"x-api-key": api_key}
                  params = {"query": query, "limit": limit, "fields": "title,abstract,authors,year,citationCount,url"}
                  response = requests.get(url, headers=headers, params=params, timeout=30)
                  response.raise_for_status()
                  for paper in response.json().get('data', []):
                      all_results.append({
                          'source': 'Semantic Scholar',
                          'title': paper.get('title', ''),
                          'year': paper.get('year'),
                          'citations': paper.get('citationCount', 0),
                          'authors': [a.get('name', '') for a in paper.get('authors', [])],
                          'abstract': paper.get('abstract', '')[:300] if paper.get('abstract') else '',
                          'url': paper.get('url', '')
                      })
              except Exception as e:
                  print(f"S2 error: {e}")

      # arXiv (no key required).
      if source in ['all', 'arxiv']:
          try:
              base_url = "http://export.arxiv.org/api/query"
              params = {"search_query": f"all:{query}", "max_results": limit}
              url = f"{base_url}?{urllib.parse.urlencode(params)}"
              with urllib.request.urlopen(url, timeout=30) as response:
                  data = response.read().decode('utf-8')
              root = ET.fromstring(data)
              ns = {'atom': 'http://www.w3.org/2005/Atom'}
              for entry in root.findall('atom:entry', ns):
                  # findtext is None-safe for missing Atom fields.
                  all_results.append({
                      'source': 'arXiv',
                      'title': (entry.findtext('atom:title', '', ns) or '').strip().replace('\n', ' '),
                      'year': (entry.findtext('atom:published', '', ns) or '')[:4],
                      'citations': None,
                      'authors': [a.findtext('atom:name', '', ns) for a in entry.findall('atom:author', ns)],
                      'abstract': (entry.findtext('atom:summary', '', ns) or '').strip()[:300],
                      'url': entry.findtext('atom:id', '', ns)
                  })
          except Exception as e:
              print(f"arXiv error: {e}")

      if not all_results:
          context['output'] = f"No papers found for: {query}"
          exit()

      # Format output
      results = []
      for i, paper in enumerate(all_results[:limit], 1):
          authors = ', '.join(paper['authors'][:3])
          if len(paper['authors']) > 3:
              authors += ' et al.'
          # 'or' fallback also covers the case where 'year' is present but None
          # (Semantic Scholar returns null years for some records).
          year = paper.get('year') or '?'
          citations = f", {paper['citations']} citations" if paper.get('citations') else ""

          results.append(f"{i}. [{paper['source']}] {paper['title']} ({year}{citations})")
          results.append(f"   Authors: {authors}")
          if paper['abstract']:
              results.append(f"   Abstract: {paper['abstract']}...")
          results.append(f"   URL: {paper['url']}")
          results.append("")

      context['output'] = f"Found {len(all_results)} papers:\n\n" + "\n".join(results)
      context['papers'] = all_results
jinx_name: semantic_scholar
description: Search Semantic Scholar for academic papers. Requires S2_API_KEY env var.
inputs:
  - query: ""
  - limit: 10
steps:
  - name: search_s2
    engine: python
    code: |
      # Query the Semantic Scholar Graph API paper-search endpoint.
      # Reads: context['query'], context['limit']; requires S2_API_KEY env var.
      # Writes: context['output'] (formatted list), context['papers'] (raw API dicts).
      import os
      import requests

      query = context.get('query', '')
      limit = int(context.get('limit', 10))

      if not query:
          context['output'] = "Usage: /semantic_scholar <query> [--limit N]"
          exit()

      api_key = os.environ.get('S2_API_KEY')
      if not api_key:
          context['output'] = "Error: S2_API_KEY environment variable not set. Get one at https://www.semanticscholar.org/product/api"
          exit()

      url = "https://api.semanticscholar.org/graph/v1/paper/search"
      headers = {"x-api-key": api_key}
      params = {
          "query": query,
          "limit": limit,
          "fields": "title,abstract,authors,year,citationCount,url,tldr"
      }

      try:
          response = requests.get(url, headers=headers, params=params, timeout=30)
          response.raise_for_status()
          data = response.json().get('data', [])

          if not data:
              context['output'] = f"No papers found for: {query}"
              exit()

          results = []
          for i, paper in enumerate(data, 1):
              title = paper.get('title', 'No title')
              # 'or' fallback covers 'year' present-but-None (null in the API response).
              year = paper.get('year') or '?'
              citations = paper.get('citationCount', 0)
              authors = ', '.join([a.get('name', '') for a in paper.get('authors', [])[:3]])
              if len(paper.get('authors', [])) > 3:
                  authors += ' et al.'
              abstract = paper.get('abstract', '')[:200] + '...' if paper.get('abstract') else 'No abstract'
              tldr = paper.get('tldr', {}).get('text', '') if paper.get('tldr') else ''
              url = paper.get('url', '')

              results.append(f"{i}. {title} ({year})")
              results.append(f"   Authors: {authors}")
              results.append(f"   Citations: {citations}")
              # Prefer the machine-generated TL;DR when the API provides one.
              if tldr:
                  results.append(f"   TL;DR: {tldr}")
              else:
                  results.append(f"   Abstract: {abstract}")
              results.append(f"   URL: {url}")
              results.append("")

          context['output'] = f"Found {len(data)} papers:\n\n" + "\n".join(results)
          context['papers'] = data

      except requests.exceptions.RequestException as e:
          context['output'] = f"Semantic Scholar API error: {e}"
@@ -2,7 +2,7 @@ jinx_name: "build"
|
|
|
2
2
|
description: "Build deployment artifacts for NPC team"
|
|
3
3
|
inputs:
|
|
4
4
|
- target: "flask" # The type of deployment target (e.g., flask, docker, cli, static).
|
|
5
|
-
-
|
|
5
|
+
- outdir: "./build" # The output directory for built artifacts.
|
|
6
6
|
- team: "./npc_team" # The path to the NPC team directory.
|
|
7
7
|
- port: 5337 # The port for flask server builds.
|
|
8
8
|
- cors: "" # Comma-separated CORS origins for flask server builds.
|
|
@@ -28,13 +28,13 @@ steps:
|
|
|
28
28
|
def build_cli_executable(config, **kwargs): return {"output": f"Mock build cli: {config}", "messages": []}
|
|
29
29
|
def build_static_site(config, **kwargs): return {"output": f"Mock build static: {config}", "messages": []}
|
|
30
30
|
|
|
31
|
-
target = context.get('target')
|
|
32
|
-
output_dir = context.get('
|
|
33
|
-
team_path = context.get('team')
|
|
34
|
-
port = context.get('port')
|
|
35
|
-
cors_origins_str = context.get('cors')
|
|
36
|
-
|
|
37
|
-
cors_origins = [origin.strip() for origin in cors_origins_str.split(',')
|
|
31
|
+
target = context.get('target') or 'flask'
|
|
32
|
+
output_dir = context.get('outdir') or './build'
|
|
33
|
+
team_path = context.get('team') or './npc_team'
|
|
34
|
+
port = context.get('port') or 5337
|
|
35
|
+
cors_origins_str = context.get('cors') or ''
|
|
36
|
+
|
|
37
|
+
cors_origins = [origin.strip() for origin in cors_origins_str.split(',') if origin.strip()] or None
|
|
38
38
|
|
|
39
39
|
build_config = {
|
|
40
40
|
'team_path': os.path.abspath(os.path.expanduser(team_path)),
|
|
jinx_name: jinxs
description: "Show available jinxs organized by folder. Use /jinxs <path> for details on a specific folder."
inputs:
  - path: "" # Optional path to show details for (e.g., "lib/core", "bin")
steps:
  - name: list_jinxs
    engine: python
    code: |
      # List available jinx files grouped by folder, or detail a single folder.
      # Reads: context['path'], the runtime-injected `npc` object.
      # Writes: `output` (the engine collects this variable as the step result).
      import os
      from pathlib import Path
      import yaml

      filter_path = context.get('path', '').strip()

      # Find jinxs directory from team or fallback
      jinxs_dir = None
      if hasattr(npc, 'team') and npc.team:
          if hasattr(npc.team, 'jinxs_dir') and npc.team.jinxs_dir:
              jinxs_dir = Path(npc.team.jinxs_dir)
          elif hasattr(npc.team, 'team_path') and npc.team.team_path:
              candidate = Path(npc.team.team_path) / "jinxs"
              if candidate.exists():
                  jinxs_dir = candidate

      if not jinxs_dir:
          # Fallback to global jinxs
          global_jinxs = Path.home() / ".npcsh" / "npc_team" / "jinxs"
          if global_jinxs.exists():
              jinxs_dir = global_jinxs

      if not jinxs_dir or not jinxs_dir.exists():
          output = "Error: Could not find jinxs directory"
          exit()

      def get_jinx_info(jinx_path):
          """Extract name and description from a jinx file."""
          try:
              with open(jinx_path, 'r') as f:
                  content = f.read()
              # Parse just the header (before steps:) so a malformed code
              # block cannot break the YAML load.
              header = content.split('steps:')[0] if 'steps:' in content else content
              data = yaml.safe_load(header)
              name = data.get('jinx_name', jinx_path.stem)
              desc = data.get('description', 'No description')
              return name, desc
          except Exception:
              # Narrowed from a bare except so Ctrl-C still propagates.
              return jinx_path.stem, 'No description'

      def get_folder_structure(base_path):
          """Get jinxs organized by folder."""
          structure = {}
          for root, dirs, files in os.walk(base_path):
              # Skip hidden directories
              dirs[:] = [d for d in dirs if not d.startswith('.')]

              jinx_files = [f for f in files if f.endswith('.jinx')]
              if jinx_files:
                  rel_path = Path(root).relative_to(base_path)
                  rel_str = str(rel_path) if str(rel_path) != '.' else 'root'
                  structure[rel_str] = []
                  for jf in sorted(jinx_files):
                      jinx_path = Path(root) / jf
                      name, desc = get_jinx_info(jinx_path)
                      structure[rel_str].append((name, desc, jf))
          return structure

      output_lines = []

      if filter_path:
          # Show details for a specific path
          target_path = jinxs_dir / filter_path
          if not target_path.exists():
              # Try to find a matching folder
              matches = []
              for root, dirs, files in os.walk(jinxs_dir):
                  rel = Path(root).relative_to(jinxs_dir)
                  if filter_path in str(rel) or filter_path in Path(root).name:
                      matches.append(rel)

              if matches:
                  output_lines.append(f"No exact match for '{filter_path}'. Did you mean:\n")
                  for m in matches[:5]:
                      output_lines.append(f"  /jinxs {m}\n")
                  output = "".join(output_lines)
                  exit()
              else:
                  output = f"No jinxs found at path: {filter_path}"
                  exit()

          # Get jinxs in this path
          structure = get_folder_structure(target_path)
          if not structure:
              # Check if it's a single folder with jinxs
              jinx_files = list(target_path.glob("*.jinx"))
              if jinx_files:
                  output_lines.append(f"Jinxs in {filter_path}:\n\n")
                  for jf in sorted(jinx_files):
                      name, desc = get_jinx_info(jf)
                      output_lines.append(f"  /{name}\n")
                      output_lines.append(f"    {desc}\n\n")
              else:
                  output = f"No jinxs found at path: {filter_path}"
                  exit()
          else:
              output_lines.append(f"Jinxs in {filter_path}:\n\n")
              for folder, jinxs in sorted(structure.items()):
                  if folder != 'root':
                      output_lines.append(f"  {folder}/\n")
                  for name, desc, filename in jinxs:
                      prefix = "    " if folder != 'root' else "  "
                      output_lines.append(f"{prefix}/{name} - {desc}\n")
                  output_lines.append("\n")

      else:
          # Show overview organized by folder
          structure = get_folder_structure(jinxs_dir)

          output_lines.append("Available Jinxs\n")
          output_lines.append("=" * 40 + "\n\n")

          # Group by top-level folder
          top_level = {}
          for folder, jinxs in structure.items():
              if folder == 'root':
                  top = 'root'
              else:
                  top = folder.split('/')[0] if '/' in folder else folder

              if top not in top_level:
                  top_level[top] = {'subfolders': {}, 'jinxs': []}

              if folder == top or folder == 'root':
                  top_level[top]['jinxs'].extend(jinxs)
              else:
                  subfolder = '/'.join(folder.split('/')[1:])
                  if subfolder not in top_level[top]['subfolders']:
                      top_level[top]['subfolders'][subfolder] = []
                  top_level[top]['subfolders'][subfolder].extend(jinxs)

          # Display: known folders first in a fixed order, others alphabetical.
          folder_order = ['bin', 'lib', 'npc_studio', 'root']
          sorted_folders = sorted(top_level.keys(), key=lambda x: (folder_order.index(x) if x in folder_order else 99, x))

          for top in sorted_folders:
              data = top_level[top]

              if top == 'root':
                  if data['jinxs']:
                      output_lines.append("Root Jinxs:\n")
                      for name, desc, _ in data['jinxs']:
                          output_lines.append(f"  /{name} - {desc}\n")
                      output_lines.append("\n")
              else:
                  total = len(data['jinxs'])
                  for sf_jinxs in data['subfolders'].values():
                      total += len(sf_jinxs)

                  output_lines.append(f"{top}/ ({total} jinxs)\n")

                  # Show direct jinxs
                  if data['jinxs']:
                      for name, desc, _ in data['jinxs'][:3]:
                          output_lines.append(f"  /{name} - {desc}\n")
                      if len(data['jinxs']) > 3:
                          output_lines.append(f"  ... and {len(data['jinxs']) - 3} more\n")

                  # Show subfolders summary
                  if data['subfolders']:
                      for subfolder, jinxs in sorted(data['subfolders'].items()):
                          output_lines.append(f"  {subfolder}/ ({len(jinxs)} jinxs)\n")

                  output_lines.append(f"  → /jinxs {top} for details\n\n")

          output_lines.append("Use /jinxs <path> for details (e.g., /jinxs lib/core)\n")

      output = "".join(output_lines)
jinx_name: "shh"
description: "Enable silent mode - only shows warnings and errors"
inputs: []
steps:
  - name: "set_silent"
    engine: "python"
    code: |
      # Switch the shell's log level to "silent" via the runtime state object.
      # Reads: context['state'], context['messages']. Writes: context['output'],
      # context['messages'] (passed through unchanged).
      shell_state = context.get('state')
      carried_messages = context.get('messages', [])

      context['output'] = (
          shell_state.set_log_level("silent")
          if shell_state
          else "Error: state not available"
      )

      context['messages'] = carried_messages
jinx_name: switch
description: Get or set a switch in the .ctx file
inputs:
  - name: "" # Switch name
  - value: null # Value to set (omit to get current value)
  - scope: "workspace" # "workspace" or "global"

steps:
  - name: manage_switch
    engine: python
    code: |
      # Read or write a named switch in a .ctx YAML file.
      # Reads: context['name'], context['value'], context['scope'], context['messages'].
      # Writes: context['output'], context['messages'], context['switch_value'].
      import os
      import yaml
      from pathlib import Path

      switch_name = context.get('name', '')
      switch_value = context.get('value')
      scope = context.get('scope', 'workspace')
      messages = context.get('messages', [])

      if not switch_name:
          context['output'] = "Usage: /switch <name> [value] [--scope workspace|global]"
          context['messages'] = messages
          exit()

      # Determine .ctx path based on scope
      if scope == "global":
          ctx_path = Path.home() / ".npcsh" / "npc_team" / "npcsh.ctx"
      else:
          cwd = os.getcwd()
          # Look for npc_team/*.ctx in workspace; first match wins,
          # falling back to a default team.ctx that will be created on set.
          npc_team_dir = Path(cwd) / "npc_team"
          ctx_files = list(npc_team_dir.glob("*.ctx")) if npc_team_dir.exists() else []
          ctx_path = ctx_files[0] if ctx_files else npc_team_dir / "team.ctx"

      # Load existing ctx; an unreadable/corrupt file degrades to empty.
      ctx_data = {}
      if ctx_path.exists():
          try:
              with open(ctx_path) as f:
                  ctx_data = yaml.safe_load(f) or {}
          except Exception:
              # Narrowed from a bare except so Ctrl-C still propagates.
              ctx_data = {}

      # Ensure switches dict exists
      if 'switches' not in ctx_data:
          ctx_data['switches'] = {}

      if switch_value is None:
          # Get mode
          current = ctx_data['switches'].get(switch_name, "not set")
          context['output'] = f"{switch_name} ({scope}): {current}"
      else:
          # Set mode
          ctx_path.parent.mkdir(parents=True, exist_ok=True)
          ctx_data['switches'][switch_name] = switch_value
          with open(ctx_path, 'w') as f:
              yaml.dump(ctx_data, f, default_flow_style=False)
          context['output'] = f"Set {switch_name} = {switch_value} ({scope})"

      context['messages'] = messages
      context['switch_value'] = ctx_data['switches'].get(switch_name)
jinx_name: switches
description: List all switches from .ctx files
inputs:
  - scope: "all" # "workspace", "global", or "all"

steps:
  - name: list_switches
    engine: python
    code: |
      # List the 'switches' mapping from the global and/or workspace .ctx files.
      # Reads: context['scope'], context['messages'].
      # Writes: context['output'], context['messages'] (passed through).
      import os
      import yaml
      from pathlib import Path

      scope = context.get('scope', 'all')
      messages = context.get('messages', [])

      output_lines = []

      def render_switches(label, ctx_path):
          """Return display lines for one ctx file; missing/unreadable -> '(none)'."""
          switches = {}
          if ctx_path is not None and ctx_path.exists():
              try:
                  with open(ctx_path) as f:
                      data = yaml.safe_load(f) or {}
                  switches = data.get('switches', {})
              except Exception:
                  # Narrowed from a bare except; corrupt file reads as empty.
                  switches = {}
          if switches:
              lines = [f"{label} switches:"]
              for k, v in switches.items():
                  lines.append(f"  {k}: {v}")
              return lines
          return [f"{label} switches: (none)"]

      # Global switches
      if scope in ["global", "all"]:
          global_ctx = Path.home() / ".npcsh" / "npc_team" / "npcsh.ctx"
          output_lines.extend(render_switches("Global", global_ctx))

      # Workspace switches (first *.ctx under ./npc_team, if any)
      if scope in ["workspace", "all"]:
          npc_team_dir = Path(os.getcwd()) / "npc_team"
          ctx_files = list(npc_team_dir.glob("*.ctx")) if npc_team_dir.exists() else []
          workspace_ctx = ctx_files[0] if ctx_files else None
          output_lines.extend(render_switches("Workspace", workspace_ctx))

      context['output'] = "\n".join(output_lines) if output_lines else "No switches configured."
      context['messages'] = messages