npcsh-1.1.20-py3-none-any.whl → npcsh-1.1.21-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (166)
  1. npcsh/_state.py +5 -71
  2. npcsh/diff_viewer.py +3 -3
  3. npcsh/npc_team/jinxs/lib/core/compress.jinx +373 -85
  4. npcsh/npc_team/jinxs/lib/core/search/db_search.jinx +17 -6
  5. npcsh/npc_team/jinxs/lib/core/search/file_search.jinx +17 -6
  6. npcsh/npc_team/jinxs/lib/core/search/kg_search.jinx +19 -8
  7. npcsh/npc_team/jinxs/lib/core/search/web_search.jinx +52 -14
  8. npcsh/npc_team/jinxs/{bin → lib/utils}/benchmark.jinx +2 -2
  9. npcsh/npc_team/jinxs/{bin → lib/utils}/jinxs.jinx +12 -12
  10. npcsh/npc_team/jinxs/{bin → lib/utils}/models.jinx +7 -7
  11. npcsh/npc_team/jinxs/{bin → lib/utils}/setup.jinx +6 -6
  12. npcsh/npc_team/jinxs/modes/alicanto.jinx +1573 -296
  13. npcsh/npc_team/jinxs/modes/arxiv.jinx +5 -5
  14. npcsh/npc_team/jinxs/modes/config_tui.jinx +300 -0
  15. npcsh/npc_team/jinxs/modes/corca.jinx +3 -3
  16. npcsh/npc_team/jinxs/modes/git.jinx +795 -0
  17. {npcsh-1.1.20.data/data/npcsh/npc_team → npcsh/npc_team/jinxs/modes}/kg.jinx +13 -13
  18. npcsh/npc_team/jinxs/modes/memories.jinx +414 -0
  19. npcsh/npc_team/jinxs/{bin → modes}/nql.jinx +10 -21
  20. npcsh/npc_team/jinxs/modes/papers.jinx +578 -0
  21. npcsh/npc_team/jinxs/modes/plonk.jinx +490 -304
  22. npcsh/npc_team/jinxs/modes/reattach.jinx +3 -3
  23. npcsh/npc_team/jinxs/modes/spool.jinx +3 -3
  24. npcsh/npc_team/jinxs/{bin → modes}/team.jinx +12 -12
  25. npcsh/npc_team/jinxs/modes/vixynt.jinx +388 -0
  26. npcsh/npc_team/jinxs/modes/wander.jinx +454 -181
  27. npcsh/npc_team/jinxs/modes/yap.jinx +10 -3
  28. npcsh/npcsh.py +112 -47
  29. npcsh/routes.py +4 -1
  30. npcsh/salmon_simulation.py +0 -0
  31. npcsh-1.1.21.data/data/npcsh/npc_team/alicanto.jinx +1633 -0
  32. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/arxiv.jinx +5 -5
  33. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/benchmark.jinx +2 -2
  34. npcsh-1.1.21.data/data/npcsh/npc_team/compress.jinx +428 -0
  35. npcsh-1.1.21.data/data/npcsh/npc_team/config_tui.jinx +300 -0
  36. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/corca.jinx +3 -3
  37. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/db_search.jinx +17 -6
  38. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/file_search.jinx +17 -6
  39. npcsh-1.1.21.data/data/npcsh/npc_team/git.jinx +795 -0
  40. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/jinxs.jinx +12 -12
  41. {npcsh/npc_team/jinxs/bin → npcsh-1.1.21.data/data/npcsh/npc_team}/kg.jinx +13 -13
  42. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/kg_search.jinx +19 -8
  43. npcsh-1.1.21.data/data/npcsh/npc_team/memories.jinx +414 -0
  44. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/models.jinx +7 -7
  45. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/nql.jinx +10 -21
  46. npcsh-1.1.21.data/data/npcsh/npc_team/papers.jinx +578 -0
  47. npcsh-1.1.21.data/data/npcsh/npc_team/plonk.jinx +565 -0
  48. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/reattach.jinx +3 -3
  49. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/setup.jinx +6 -6
  50. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/spool.jinx +3 -3
  51. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/team.jinx +12 -12
  52. npcsh-1.1.21.data/data/npcsh/npc_team/vixynt.jinx +388 -0
  53. npcsh-1.1.21.data/data/npcsh/npc_team/wander.jinx +728 -0
  54. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/web_search.jinx +52 -14
  55. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/yap.jinx +10 -3
  56. {npcsh-1.1.20.dist-info → npcsh-1.1.21.dist-info}/METADATA +2 -2
  57. {npcsh-1.1.20.dist-info → npcsh-1.1.21.dist-info}/RECORD +145 -150
  58. npcsh-1.1.21.dist-info/entry_points.txt +11 -0
  59. npcsh/npc_team/jinxs/bin/config_tui.jinx +0 -300
  60. npcsh/npc_team/jinxs/bin/memories.jinx +0 -317
  61. npcsh/npc_team/jinxs/bin/vixynt.jinx +0 -122
  62. npcsh/npc_team/jinxs/lib/core/search/mem_review.jinx +0 -73
  63. npcsh/npc_team/jinxs/lib/core/search/mem_search.jinx +0 -388
  64. npcsh/npc_team/jinxs/lib/research/paper_search.jinx +0 -412
  65. npcsh/npc_team/jinxs/lib/research/semantic_scholar.jinx +0 -386
  66. npcsh/npc_team/plonkjr.npc +0 -23
  67. npcsh-1.1.20.data/data/npcsh/npc_team/alicanto.jinx +0 -356
  68. npcsh-1.1.20.data/data/npcsh/npc_team/compress.jinx +0 -140
  69. npcsh-1.1.20.data/data/npcsh/npc_team/config_tui.jinx +0 -300
  70. npcsh-1.1.20.data/data/npcsh/npc_team/mem_review.jinx +0 -73
  71. npcsh-1.1.20.data/data/npcsh/npc_team/mem_search.jinx +0 -388
  72. npcsh-1.1.20.data/data/npcsh/npc_team/memories.jinx +0 -317
  73. npcsh-1.1.20.data/data/npcsh/npc_team/paper_search.jinx +0 -412
  74. npcsh-1.1.20.data/data/npcsh/npc_team/plonk.jinx +0 -379
  75. npcsh-1.1.20.data/data/npcsh/npc_team/plonkjr.npc +0 -23
  76. npcsh-1.1.20.data/data/npcsh/npc_team/semantic_scholar.jinx +0 -386
  77. npcsh-1.1.20.data/data/npcsh/npc_team/vixynt.jinx +0 -122
  78. npcsh-1.1.20.data/data/npcsh/npc_team/wander.jinx +0 -455
  79. npcsh-1.1.20.dist-info/entry_points.txt +0 -25
  80. /npcsh/npc_team/jinxs/lib/{orchestration → core}/convene.jinx +0 -0
  81. /npcsh/npc_team/jinxs/lib/{orchestration → core}/delegate.jinx +0 -0
  82. /npcsh/npc_team/jinxs/{bin → lib/core}/sample.jinx +0 -0
  83. /npcsh/npc_team/jinxs/{bin → lib/utils}/sync.jinx +0 -0
  84. /npcsh/npc_team/jinxs/{bin → modes}/roll.jinx +0 -0
  85. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/add_tab.jinx +0 -0
  86. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/alicanto.npc +0 -0
  87. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/alicanto.png +0 -0
  88. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/browser_action.jinx +0 -0
  89. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/browser_screenshot.jinx +0 -0
  90. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/build.jinx +0 -0
  91. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/chat.jinx +0 -0
  92. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/click.jinx +0 -0
  93. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/close_browser.jinx +0 -0
  94. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/close_pane.jinx +0 -0
  95. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/close_tab.jinx +0 -0
  96. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/cmd.jinx +0 -0
  97. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/compile.jinx +0 -0
  98. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/confirm.jinx +0 -0
  99. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/convene.jinx +0 -0
  100. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/corca.npc +0 -0
  101. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/corca.png +0 -0
  102. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/corca_example.png +0 -0
  103. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/delegate.jinx +0 -0
  104. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/edit_file.jinx +0 -0
  105. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/focus_pane.jinx +0 -0
  106. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/frederic.npc +0 -0
  107. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/frederic4.png +0 -0
  108. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/guac.jinx +0 -0
  109. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/guac.npc +0 -0
  110. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/guac.png +0 -0
  111. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/help.jinx +0 -0
  112. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/incognide.jinx +0 -0
  113. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/init.jinx +0 -0
  114. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/kadiefa.npc +0 -0
  115. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/kadiefa.png +0 -0
  116. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/key_press.jinx +0 -0
  117. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/launch_app.jinx +0 -0
  118. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/list_panes.jinx +0 -0
  119. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/load_file.jinx +0 -0
  120. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/navigate.jinx +0 -0
  121. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/notify.jinx +0 -0
  122. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/npcsh.ctx +0 -0
  123. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/npcsh_sibiji.png +0 -0
  124. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/open_browser.jinx +0 -0
  125. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/open_pane.jinx +0 -0
  126. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/ots.jinx +0 -0
  127. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/paste.jinx +0 -0
  128. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/plonk.npc +0 -0
  129. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/plonk.png +0 -0
  130. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/plonkjr.png +0 -0
  131. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/pti.jinx +0 -0
  132. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/python.jinx +0 -0
  133. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/read_pane.jinx +0 -0
  134. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/roll.jinx +0 -0
  135. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/run_terminal.jinx +0 -0
  136. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/sample.jinx +0 -0
  137. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/screenshot.jinx +0 -0
  138. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/search.jinx +0 -0
  139. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/send_message.jinx +0 -0
  140. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/serve.jinx +0 -0
  141. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/set.jinx +0 -0
  142. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/sh.jinx +0 -0
  143. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/shh.jinx +0 -0
  144. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/sibiji.npc +0 -0
  145. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/sibiji.png +0 -0
  146. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/sleep.jinx +0 -0
  147. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/split_pane.jinx +0 -0
  148. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/spool.png +0 -0
  149. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/sql.jinx +0 -0
  150. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/switch.jinx +0 -0
  151. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/switch_npc.jinx +0 -0
  152. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/switch_tab.jinx +0 -0
  153. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/switches.jinx +0 -0
  154. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/sync.jinx +0 -0
  155. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/teamviz.jinx +0 -0
  156. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/trigger.jinx +0 -0
  157. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/type_text.jinx +0 -0
  158. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/usage.jinx +0 -0
  159. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/verbose.jinx +0 -0
  160. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/wait.jinx +0 -0
  161. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/write_file.jinx +0 -0
  162. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/yap.png +0 -0
  163. {npcsh-1.1.20.data → npcsh-1.1.21.data}/data/npcsh/npc_team/zen_mode.jinx +0 -0
  164. {npcsh-1.1.20.dist-info → npcsh-1.1.21.dist-info}/WHEEL +0 -0
  165. {npcsh-1.1.20.dist-info → npcsh-1.1.21.dist-info}/licenses/LICENSE +0 -0
  166. {npcsh-1.1.20.dist-info → npcsh-1.1.21.dist-info}/top_level.txt +0 -0
@@ -65,8 +65,8 @@ steps:
  sys.stdout.flush()
  time.sleep(0.1)
  # Flush any pending input
- while select.select([sys.stdin], [], [], 0)[0]:
- sys.stdin.read(1)
+ while select.select([fd], [], [], 0)[0]:
+ os.read(fd, 1).decode('latin-1')
  except:
  pass

@@ -146,8 +146,8 @@ steps:
  out.append("\033[2J\033[H")

  header = f" NQL: {os.path.basename(db_path)} "
- out.append(f"\033[1;1H\033[44;37;1m{'=' * width}\033[0m")
- out.append(f"\033[1;{(width - len(header)) // 2}H\033[44;37;1m{header}\033[0m")
+ out.append(f"\033[1;1H\033[7;1m{'=' * width}\033[0m")
+ out.append(f"\033[1;{(width - len(header)) // 2}H\033[7;1m{header}\033[0m")

  modes = ['Tables', 'Query']
  tab_str = ""
@@ -264,10 +264,10 @@ steps:

  def handle_input(c):
  if c == '\x1b':
- if select.select([sys.stdin], [], [], 0.1)[0]:
- c2 = sys.stdin.read(1)
+ if select.select([fd], [], [], 0.1)[0]:
+ c2 = os.read(fd, 1).decode('latin-1')
  if c2 == '[':
- c3 = sys.stdin.read(1)
+ c3 = os.read(fd, 1).decode('latin-1')
  if c3 == 'A':
  if db_state.mode == 'tables':
  move_up()
@@ -324,19 +324,8 @@ steps:
  return True

  def handle_query_input(c):
- if c == 'k':
- db_state.row_scroll = max(0, db_state.row_scroll - 1)
- return True
- if c == 'j':
- db_state.row_scroll = min(max(0, len(db_state.rows) - 1), db_state.row_scroll + 1)
- return True
- if c == 'h':
- db_state.col_scroll = max(0, db_state.col_scroll - 1)
- return True
- if c == 'l':
- db_state.col_scroll = min(max(0, len(db_state.columns) - 1), db_state.col_scroll + 1)
- return True
-
+ # In query mode, all printable chars go to the buffer.
+ # Use arrow keys (handled in escape sequence block) for result navigation.
  if c == '\r' or c == '\n':
  if db_state.query_buffer.strip():
  run_query(db_state.query_buffer)
@@ -375,7 +364,7 @@ steps:
  render_screen()

  while True:
- c = sys.stdin.read(1)
+ c = os.read(fd, 1).decode('latin-1')
  if not handle_input(c):
  break
  render_screen()
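
The common thread in the nql.jinx hunks above is swapping buffered sys.stdin.read(1) for raw os.read() on the stdin file descriptor, decoded as latin-1 so every byte maps to a character. A minimal standalone sketch of that read pattern, assuming a POSIX terminal; the setup mirrors what the jinx already does with termios/tty, and the names here are illustrative:

import os, select, sys, termios, tty

fd = sys.stdin.fileno()
old_settings = termios.tcgetattr(fd)
try:
    tty.setcbreak(fd)
    # Wait up to 5 s for a keypress, then read one raw byte from the fd.
    if select.select([fd], [], [], 5.0)[0]:
        c = os.read(fd, 1).decode('latin-1')  # latin-1 never raises on high bytes
        print(repr(c))
finally:
    termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
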
@@ -0,0 +1,578 @@
+ jinx_name: papers
+ description: Multi-platform research paper browser with tabs for arXiv, Semantic Scholar, OpenReview, and Wikipedia
+ interactive: true
+ inputs:
+ - query: ""
+ - limit: 10
+ - text: "false"
+
+ steps:
+ - name: search_and_browse
+ engine: python
+ code: |
+ import os
+ import sys
+ import tty
+ import termios
+ import subprocess
+ import urllib.request
+ import urllib.parse
+ import xml.etree.ElementTree as ET
+ import textwrap
+ import json
+ import time
+
+ def get_terminal_size():
+ try:
+ size = os.get_terminal_size()
+ return size.columns, size.lines
+ except:
+ return 80, 24
+
+ # ── Source fetchers ──────────────────────────────────────────
+
+ def fetch_arxiv(query, limit):
+ papers = []
+ try:
+ search_query = "all:" + query
+ url = "http://export.arxiv.org/api/query?search_query=" + urllib.parse.quote(search_query, safe=':+') + "&start=0&max_results=" + str(limit) + "&sortBy=relevance&sortOrder=descending"
+ with urllib.request.urlopen(url, timeout=30) as response:
+ data = response.read().decode('utf-8')
+ root = ET.fromstring(data)
+ ns = {'atom': 'http://www.w3.org/2005/Atom'}
+ for entry in root.findall('atom:entry', ns):
+ title = entry.find('atom:title', ns).text.strip().replace('\n', ' ')
+ abstract = entry.find('atom:summary', ns).text.strip()
+ published = entry.find('atom:published', ns).text[:10]
+ authors = [a.find('atom:name', ns).text for a in entry.findall('atom:author', ns)]
+ arxiv_id = entry.find('atom:id', ns).text
+ aid = arxiv_id.split('/')[-1]
+ pdf_link = arxiv_id.replace('/abs/', '/pdf/') + '.pdf'
+ primary_cat = entry.find('{http://arxiv.org/schemas/atom}primary_category')
+ cat = primary_cat.get('term') if primary_cat is not None else ''
+ papers.append({
+ 'title': title, 'authors': authors, 'abstract': abstract,
+ 'year': published[:4], 'date': published, 'url': arxiv_id,
+ 'pdf': pdf_link, 'aid': aid, 'category': cat,
+ 'citations': None, 'venue': 'arXiv ' + cat,
+ 'source': 'arXiv'
+ })
+ except Exception as e:
+ papers.append({'title': f'[arXiv error: {e}]', 'authors': [], 'abstract': '', 'year': '', 'date': '', 'url': '', 'pdf': '', 'aid': '', 'category': '', 'citations': None, 'venue': '', 'source': 'arXiv'})
+ return papers
+
+ def fetch_semantic_scholar(query, limit):
+ papers = []
+ api_key = os.environ.get('S2_API_KEY')
+ try:
+ import requests
+ url = "https://api.semanticscholar.org/graph/v1/paper/search"
+ headers = {"x-api-key": api_key} if api_key else {}
+ params = {"query": query, "limit": limit, "fields": "title,abstract,authors,year,citationCount,url,venue,openAccessPdf"}
+ response = requests.get(url, headers=headers, params=params, timeout=30)
+ response.raise_for_status()
+ for p in response.json().get('data', []):
+ pdf_info = p.get('openAccessPdf') or {}
+ papers.append({
+ 'title': p.get('title', ''), 'authors': [a.get('name', '') for a in p.get('authors', [])],
+ 'abstract': p.get('abstract') or '', 'year': str(p.get('year') or ''),
+ 'date': str(p.get('year') or ''), 'url': p.get('url', ''),
+ 'pdf': pdf_info.get('url', ''), 'aid': '', 'category': '',
+ 'citations': p.get('citationCount', 0), 'venue': p.get('venue', ''),
+ 'source': 'S2'
+ })
+ except Exception as e:
+ papers.append({'title': f'[S2 error: {e}]', 'authors': [], 'abstract': '', 'year': '', 'date': '', 'url': '', 'pdf': '', 'aid': '', 'category': '', 'citations': None, 'venue': '', 'source': 'S2'})
+ return papers
+
+ def fetch_openreview(query, limit):
+ papers = []
+ try:
+ import requests
+ url = "https://api2.openreview.net/notes/search"
+ params = {"query": query, "limit": limit, "content": "all"}
+ response = requests.get(url, params=params, timeout=30)
+ response.raise_for_status()
+ for note in response.json().get('notes', []):
+ content = note.get('content', {})
+ title_field = content.get('title', {})
+ title = title_field.get('value', '') if isinstance(title_field, dict) else str(title_field)
+ abstract_field = content.get('abstract', {})
+ abstract = abstract_field.get('value', '') if isinstance(abstract_field, dict) else str(abstract_field)
+ authors_field = content.get('authors', {})
+ authors = authors_field.get('value', []) if isinstance(authors_field, dict) else (authors_field if isinstance(authors_field, list) else [])
+ venue_field = content.get('venue', {})
+ venue = venue_field.get('value', '') if isinstance(venue_field, dict) else str(venue_field) if venue_field else ''
+ forum = note.get('forum', note.get('id', ''))
+ cdate = note.get('cdate') or note.get('tcdate') or 0
+ year = time.strftime('%Y', time.gmtime(cdate / 1000)) if cdate else ''
+ papers.append({
+ 'title': title, 'authors': authors if isinstance(authors, list) else [],
+ 'abstract': abstract, 'year': year, 'date': year,
+ 'url': f'https://openreview.net/forum?id={forum}',
+ 'pdf': f'https://openreview.net/pdf?id={forum}',
+ 'aid': forum, 'category': '', 'citations': None,
+ 'venue': venue, 'source': 'OpenReview'
+ })
+ except Exception as e:
+ papers.append({'title': f'[OpenReview error: {e}]', 'authors': [], 'abstract': '', 'year': '', 'date': '', 'url': '', 'pdf': '', 'aid': '', 'category': '', 'citations': None, 'venue': '', 'source': 'OpenReview'})
+ return papers
+
+ def fetch_wikipedia(query, limit):
+ papers = []
+ try:
+ url = "https://en.wikipedia.org/w/api.php"
+ params = {"action": "query", "list": "search", "srsearch": query, "srlimit": limit, "format": "json", "srprop": "snippet|timestamp|wordcount"}
+ req = urllib.request.Request(url + "?" + urllib.parse.urlencode(params), headers={"User-Agent": "npcsh/1.0"})
+ with urllib.request.urlopen(req, timeout=15) as response:
+ data = json.loads(response.read().decode('utf-8'))
+ for r in data.get('query', {}).get('search', []):
+ import re
+ snippet = re.sub(r'<[^>]+>', '', r.get('snippet', ''))
+ title = r.get('title', '')
+ pageid = r.get('pageid', '')
+ papers.append({
+ 'title': title, 'authors': ['Wikipedia'],
+ 'abstract': snippet, 'year': r.get('timestamp', '')[:4],
+ 'date': r.get('timestamp', '')[:10],
+ 'url': f'https://en.wikipedia.org/wiki/{urllib.parse.quote(title.replace(" ", "_"))}',
+ 'pdf': '', 'aid': str(pageid), 'category': '',
+ 'citations': r.get('wordcount'), 'venue': 'Wikipedia',
+ 'source': 'Wikipedia'
+ })
+ except Exception as e:
+ papers.append({'title': f'[Wikipedia error: {e}]', 'authors': [], 'abstract': '', 'year': '', 'date': '', 'url': '', 'pdf': '', 'aid': '', 'category': '', 'citations': None, 'venue': '', 'source': 'Wikipedia'})
+ return papers
+
+ # ── PDF terminal rendering (from arxiv) ─────────────────────
+
+ def render_pdf_terminal(pdf_path, width=80):
+ lines = []
+ try:
+ result = subprocess.run(['pdftotext', '-nopgbrk', pdf_path, '-'],
+ capture_output=True, text=True, timeout=30)
+ if result.returncode == 0:
+ for line in result.stdout.split('\n'):
+ cleaned = line.strip()
+ if cleaned:
+ lines.append(cleaned[:width])
+ elif lines and lines[-1] != '':
+ lines.append('')
+ except Exception as e:
+ lines.append('[Text extraction failed: ' + str(e) + ']')
+ return lines
+
+ # ── Main ────────────────────────────────────────────────────
+
+ query = context.get('query', '').strip()
+ limit = int(context.get('limit', 10) or 10)
+ text_only = str(context.get('text', 'false')).lower() in ('true', '1', 'yes')
+
+ if text_only and query:
+ all_papers = fetch_arxiv(query, limit) + fetch_semantic_scholar(query, limit)
+ results = []
+ for i, p in enumerate(all_papers, 1):
+ auth = ', '.join(p['authors'][:3])
+ if len(p['authors']) > 3:
+ auth += ' et al.'
+ results.append(f"{i}. [{p['source']}] {p['title']} ({p['year']})")
+ results.append(f" Authors: {auth}")
+ if p['abstract']:
+ results.append(f" Abstract: {p['abstract'][:200]}...")
+ results.append(f" URL: {p['url']}")
+ results.append("")
+ context['output'] = f"Found {len(all_papers)} papers:\n\n" + "\n".join(results)
+ else:
+ # ── Interactive TUI ──────────────────────────────────────
+
+ TABS = [
+ {'name': 'arXiv', 'key': '1', 'fetch': fetch_arxiv, 'color': '\033[33m'},
+ {'name': 'S2', 'key': '2', 'fetch': fetch_semantic_scholar, 'color': '\033[34m'},
+ {'name': 'OpenReview', 'key': '3', 'fetch': fetch_openreview, 'color': '\033[35m'},
+ {'name': 'Wikipedia', 'key': '4', 'fetch': fetch_wikipedia, 'color': '\033[32m'},
+ ]
+
+ # Fetch all sources
+ tab_data = {}
+ for tab in TABS:
+ tab_data[tab['name']] = None # lazy load
+
+ active_tab = 0
+ selected = 0
+ scroll = 0
+ mode = 'search' if not query else 'list'
+ search_buf = query or ''
+ detail_scroll = 0
+ pdf_scroll = 0
+ pdf_lines = []
+ sort_mode = 'relevance'
+
+ width, height = get_terminal_size()
+
+ fd = sys.stdin.fileno()
+ old_settings = termios.tcgetattr(fd)
+
+ state = {'query': query}
+
+ def do_search(q):
+ state['query'] = q
+ for name in tab_data:
+ tab_data[name] = None
+
+ def get_papers():
+ if not state['query']:
+ return []
+ name = TABS[active_tab]['name']
+ if tab_data[name] is None:
+ sys.stdout.write(f'\033[{height // 2};1H\033[K Loading {name}...')
+ sys.stdout.flush()
+ tab_data[name] = TABS[active_tab]['fetch'](state['query'], limit)
+ sys.stdout.write('\033[2J\033[H')
+ return tab_data[name]
+
+ def sort_papers(papers, key):
+ if key == 'date':
+ return sorted(papers, key=lambda p: p.get('date') or '', reverse=True)
+ elif key == 'citations':
+ return sorted(papers, key=lambda p: p.get('citations') or 0, reverse=True)
+ elif key == 'author':
+ return sorted(papers, key=lambda p: (p['authors'][0] if p['authors'] else '').lower())
+ return papers
+
+ def draw_tabs(width):
+ """Draw tab bar."""
+ parts = []
+ for i, tab in enumerate(TABS):
+ name = tab['name']
+ if i == active_tab:
+ parts.append(f"\033[7;1m {tab['key']}:{name} \033[0m")
+ else:
+ parts.append(f"\033[2m {tab['key']}:{name} \033[0m")
+ return ''.join(parts)
+
+ try:
+ tty.setcbreak(fd)
+ sys.stdout.write('\033[?25l')
+ sys.stdout.write('\033[2J\033[H')
+
+ while True:
+ width, height = get_terminal_size()
+ list_height = height - 6
+ papers = get_papers()
+ if sort_mode != 'relevance':
+ papers = sort_papers(papers[:], sort_mode)
+
+ sys.stdout.write('\033[H')
+
+ # Tab bar
+ tab_bar = draw_tabs(width)
+ if mode == 'search':
+ search_info = ''
+ elif state['query']:
+ search_info = f" \033[2m\"{state['query']}\" ({len(papers)} results) [sort:{sort_mode}]\033[0m"
+ else:
+ search_info = ''
+ sys.stdout.write(tab_bar + search_info + '\033[K\n')
+
+ tab_color = TABS[active_tab]['color']
+
+ if mode == 'search':
+ # Search prompt screen
+ sys.stdout.write(f'\033[90m{"─" * width}\033[0m\n')
+ mid = height // 3
+ for i in range(height - 3):
+ sys.stdout.write(f'\033[{3+i};1H\033[K')
+ sys.stdout.write(f'\033[{mid};1H')
+ sys.stdout.write(f' \033[1mSearch research papers across platforms\033[0m\n')
+ sys.stdout.write(f'\033[{mid+1};1H\033[K')
+ sys.stdout.write(f' \033[2marXiv · Semantic Scholar · OpenReview · Wikipedia\033[0m\n')
+ sys.stdout.write(f'\033[{mid+3};1H\033[K')
+ sys.stdout.write(f'\033[?25h') # show cursor for typing
+ sys.stdout.write(f' \033[1m>\033[0m {search_buf}\033[K')
+ sys.stdout.write(f'\033[{height};1H\033[K\033[7m Type query and press Enter · q to quit \033[0m')
+
+ elif mode == 'list':
+ # Clamp selection
+ if papers:
+ selected = max(0, min(selected, len(papers) - 1))
+ if selected < scroll:
+ scroll = selected
+ elif selected >= scroll + list_height:
+ scroll = selected - list_height + 1
+
+ # Column header
+ cite_label = "CITE" if TABS[active_tab]['name'] in ('S2',) else "CAT " if TABS[active_tab]['name'] == 'arXiv' else " "
+ col = f" {'YEAR':<5} {cite_label:<6} {'AUTHORS':<24} {'TITLE'}"
+ sys.stdout.write(f'\033[90m{col[:width]}\033[0m\n')
+ sys.stdout.write(f'\033[90m{"─" * width}\033[0m\n')
+
+ for i in range(list_height):
+ idx = scroll + i
+ sys.stdout.write(f'\033[{4+i};1H\033[K')
+ if idx >= len(papers):
+ continue
+ p = papers[idx]
+ year = (p.get('year') or '?')[:5]
+ if TABS[active_tab]['name'] == 'S2':
+ cite = str(p.get('citations') or '-')[:6]
+ elif TABS[active_tab]['name'] == 'arXiv':
+ cite = (p.get('category') or '')[:6]
+ else:
+ cite = ''
+ cite = cite[:6]
+ auth_names = [a.split()[-1] if ' ' in a else a for a in p.get('authors', [])[:3]]
+ auth = ', '.join(auth_names)
+ if len(p.get('authors', [])) > 3:
+ auth += '..'
+ auth = auth[:24]
+ title = (p.get('title') or '')[:width - 40]
+
+ line = f" {year:<5} {cite:<6} {auth:<24} {title}"
+ line = line[:width - 1]
+
+ if idx == selected:
+ sys.stdout.write(f'\033[7;1m>{line}\033[0m')
+ else:
+ sys.stdout.write(f' {line}')
+
+ # Footer
+ sys.stdout.write(f'\033[{height-1};1H\033[K\033[90m{"─" * width}\033[0m')
+ pos = f'[{selected+1}/{len(papers)}]' if papers else '[0/0]'
+ footer = f' j/k:Nav Enter:Detail o:Open p:PDF d:DL v:View s:Sort Tab:Next 1-4:Jump /:Search q:Quit {pos} '
+ sys.stdout.write(f'\033[{height};1H\033[K\033[7m{footer[:width]}\033[0m')
+
+ elif mode == 'detail':
+ p = papers[selected] if papers else {}
+ sys.stdout.write(f'\033[90m{"─" * width}\033[0m\n')
+
+ lines = []
+ lines.append(f'\033[1mTitle:\033[0m {p.get("title", "")}')
+ lines.append('')
+ auth_str = ', '.join(p.get('authors', []))
+ lines.append(f'\033[1mAuthors ({len(p.get("authors", []))}):\033[0m {auth_str}')
+ lines.append(f'\033[1mYear:\033[0m {p.get("year", "?")} \033[1mVenue:\033[0m {p.get("venue", "")}')
+ if p.get('category'):
+ lines.append(f'\033[1mCategory:\033[0m {p["category"]}')
+ if p.get('citations') is not None:
+ lines.append(f'\033[1mCitations:\033[0m {p["citations"]}')
+ lines.append(f'\033[1mURL:\033[0m {p.get("url", "")}')
+ if p.get('pdf'):
+ lines.append(f'\033[1mPDF:\033[0m {p["pdf"]}')
+ lines.append('')
+ lines.append('\033[1mAbstract:\033[0m')
+ wrapped = textwrap.wrap(p.get('abstract', ''), width=width - 4)
+ lines.extend(wrapped)
+
+ for i in range(list_height + 1):
+ idx = detail_scroll + i
+ sys.stdout.write(f'\033[{3+i};1H\033[K')
+ if idx < len(lines):
+ sys.stdout.write(' ' + lines[idx][:width - 4])
+
+ sys.stdout.write(f'\033[{height-1};1H\033[K\033[90m{"─" * width}\033[0m')
+ footer = ' j/k:Scroll b:Back o:Browser p:PDF d:Download v:TermView q:Quit '
+ sys.stdout.write(f'\033[{height};1H\033[K\033[7m{footer[:width]}\033[0m')
+
+ elif mode == 'pdfview':
+ p = papers[selected] if papers else {}
+ header = f" PDF: {p.get('title', '')[:width-8]} "
+ sys.stdout.write(f'\033[7;1m{header[:width].ljust(width)}\033[0m\n')
+ sys.stdout.write(f'\033[90m{"─" * width}\033[0m\n')
+
+ view_height = height - 4
+ for i in range(view_height):
+ idx = pdf_scroll + i
+ sys.stdout.write(f'\033[{3+i};1H\033[K')
+ if idx < len(pdf_lines):
+ sys.stdout.write(' ' + pdf_lines[idx][:width - 4])
+
+ sys.stdout.write(f'\033[{height-1};1H\033[K\033[90m{"─" * width}\033[0m')
+ pct = int((pdf_scroll / max(1, len(pdf_lines) - view_height)) * 100) if len(pdf_lines) > view_height else 100
+ footer = f' j/k/PgDn/PgUp:Scroll b:Back q:Quit [{pct}%] '
+ sys.stdout.write(f'\033[{height};1H\033[K\033[7m{footer[:width]}\033[0m')
+
+ sys.stdout.flush()
+
+ # ── Input ──────────────────────────────────────
+ c = sys.stdin.read(1)
+
+ # Search mode input
+ if mode == 'search':
+ if c in ('\r', '\n'):
+ if search_buf.strip():
+ do_search(search_buf.strip())
+ mode = 'list'
+ selected = 0
+ scroll = 0
+ sys.stdout.write('\033[?25l')
+ sys.stdout.write('\033[2J\033[H')
+ elif c in ('q', '\x03') and not search_buf:
+ context['output'] = ''
+ break
+ elif c == '\x1b':
+ context['output'] = ''
+ break
+ elif c == '\x7f' or c == '\b':
+ search_buf = search_buf[:-1]
+ elif c.isprintable():
+ search_buf += c
+ continue
+
+ if c == '\x1b':
+ c2 = sys.stdin.read(1)
+ if c2 == '[':
+ c3 = sys.stdin.read(1)
+ if c3 == 'A': # Up
+ if mode == 'list' and selected > 0:
+ selected -= 1
+ elif mode in ('detail',) and detail_scroll > 0:
+ detail_scroll -= 1
+ elif mode == 'pdfview' and pdf_scroll > 0:
+ pdf_scroll -= 1
+ elif c3 == 'B': # Down
+ if mode == 'list' and papers and selected < len(papers) - 1:
+ selected += 1
+ elif mode == 'detail':
+ detail_scroll += 1
+ elif mode == 'pdfview':
+ pdf_scroll += 1
+ elif c3 == 'Z': # Shift+Tab - prev tab
+ active_tab = (active_tab - 1) % len(TABS)
+ selected = 0
+ scroll = 0
+ sort_mode = 'relevance'
+ mode = 'list'
+ sys.stdout.write('\033[2J\033[H')
+ elif c3 == '5': # Page Up
+ sys.stdin.read(1)
+ if mode == 'pdfview':
+ pdf_scroll = max(0, pdf_scroll - (height - 6))
+ elif c3 == '6': # Page Down
+ sys.stdin.read(1)
+ if mode == 'pdfview':
+ pdf_scroll += height - 6
+ else:
+ # Bare Esc
+ if mode == 'pdfview':
+ mode = 'detail'
+ sys.stdout.write('\033[2J\033[H')
+ elif mode == 'detail':
+ mode = 'list'
+ sys.stdout.write('\033[2J\033[H')
+ else:
+ context['output'] = f"Searched: {state['query']}"
+ break
+ continue
+
+ if c == '\t': # Tab - next tab
+ active_tab = (active_tab + 1) % len(TABS)
+ selected = 0
+ scroll = 0
+ sort_mode = 'relevance'
+ mode = 'list'
+ sys.stdout.write('\033[2J\033[H')
+ elif c in ('1', '2', '3', '4'):
+ new_tab = int(c) - 1
+ if new_tab != active_tab:
+ active_tab = new_tab
+ selected = 0
+ scroll = 0
+ sort_mode = 'relevance'
+ mode = 'list'
+ sys.stdout.write('\033[2J\033[H')
+ elif c == 'q' or c == '\x03':
+ context['output'] = f"Searched: {state['query']}"
+ break
+ elif c == 'k':
+ if mode == 'list' and selected > 0:
+ selected -= 1
+ elif mode == 'detail' and detail_scroll > 0:
+ detail_scroll -= 1
+ elif mode == 'pdfview' and pdf_scroll > 0:
+ pdf_scroll -= 1
+ elif c == 'j':
+ if mode == 'list' and papers and selected < len(papers) - 1:
+ selected += 1
+ elif mode == 'detail':
+ detail_scroll += 1
+ elif mode == 'pdfview':
+ pdf_scroll += 1
+ elif c in ('\r', '\n') and mode == 'list' and papers:
+ mode = 'detail'
+ detail_scroll = 0
+ sys.stdout.write('\033[2J\033[H')
+ elif c == 'b':
+ if mode == 'pdfview':
+ mode = 'detail'
+ sys.stdout.write('\033[2J\033[H')
+ elif mode == 'detail':
+ mode = 'list'
+ sys.stdout.write('\033[2J\033[H')
+ elif c == 'o' and papers:
+ p = papers[selected]
+ try:
+ subprocess.Popen(['xdg-open', p['url']], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
+ except:
+ pass
+ elif c == 'p' and papers:
+ p = papers[selected]
+ if p.get('pdf'):
+ try:
+ subprocess.Popen(['xdg-open', p['pdf']], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
+ except:
+ pass
+ elif c == 'd' and papers:
+ p = papers[selected]
+ if p.get('pdf'):
+ aid = (p.get('aid') or p.get('title', 'paper')[:30]).replace('/', '_').replace(' ', '_')
+ filename = aid + '.pdf'
+ filepath = os.path.join(os.getcwd(), filename)
+ try:
+ sys.stdout.write(f'\033[{height};1H\033[K\033[43;30m Downloading {filename}... \033[0m')
+ sys.stdout.flush()
+ urllib.request.urlretrieve(p['pdf'], filepath)
+ sys.stdout.write(f'\033[{height};1H\033[K\033[42;30m Saved: {filepath} \033[0m')
+ sys.stdout.flush()
+ time.sleep(1)
+ except Exception as e:
+ sys.stdout.write(f'\033[{height};1H\033[K\033[41;37m Download failed: {str(e)[:50]} \033[0m')
+ sys.stdout.flush()
+ time.sleep(2)
+ elif c == 'v' and papers:
+ p = papers[selected]
+ if p.get('pdf'):
+ import tempfile
+ try:
+ sys.stdout.write(f'\033[{height};1H\033[K\033[43;30m Fetching PDF... \033[0m')
+ sys.stdout.flush()
+ with tempfile.NamedTemporaryFile(suffix='.pdf', delete=False) as tmp:
+ tmp_path = tmp.name
+ urllib.request.urlretrieve(p['pdf'], tmp_path)
+ pdf_lines = render_pdf_terminal(tmp_path, width - 4)
+ os.unlink(tmp_path)
+ mode = 'pdfview'
+ pdf_scroll = 0
+ sys.stdout.write('\033[2J\033[H')
+ except Exception as e:
+ sys.stdout.write(f'\033[{height};1H\033[K\033[41;37m PDF view failed: {str(e)[:50]} \033[0m')
+ sys.stdout.flush()
+ time.sleep(2)
+ elif c == 's' and mode == 'list':
+ # Cycle sort modes
+ cycle = ['relevance', 'date', 'citations', 'author']
+ idx = cycle.index(sort_mode) if sort_mode in cycle else 0
+ sort_mode = cycle[(idx + 1) % len(cycle)]
+ selected = 0
+ scroll = 0
+ sys.stdout.write('\033[2J\033[H')
+ elif c == '/' and mode == 'list':
+ # New search
+ mode = 'search'
+ search_buf = ''
+ sys.stdout.write('\033[2J\033[H')
+
+ finally:
+ termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
+ sys.stdout.write('\033[?25h')
+ sys.stdout.write('\033[2J\033[H')
+ sys.stdout.flush()
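
The new papers.jinx also exposes a non-interactive path (text: "true") that simply concatenates arXiv and Semantic Scholar results. Its arXiv fetcher is a plain query against the export.arxiv.org Atom API; a self-contained sketch of that request, reduced to titles only (the function name and example query are illustrative, not part of the jinx):

import urllib.parse, urllib.request
import xml.etree.ElementTree as ET

def arxiv_titles(query, limit=5):
    # Same Atom endpoint and parameters the jinx uses.
    url = ("http://export.arxiv.org/api/query?search_query="
           + urllib.parse.quote("all:" + query, safe=':+')
           + f"&start=0&max_results={limit}&sortBy=relevance&sortOrder=descending")
    with urllib.request.urlopen(url, timeout=30) as resp:
        root = ET.fromstring(resp.read().decode('utf-8'))
    ns = {'atom': 'http://www.w3.org/2005/Atom'}
    return [e.find('atom:title', ns).text.strip().replace('\n', ' ')
            for e in root.findall('atom:entry', ns)]

if __name__ == "__main__":
    for title in arxiv_titles("language model agents"):
        print(title)
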