@gguf/coder 0.3.1 → 0.3.3

This diff shows the changes between publicly available package versions as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (413)
  1. package/package.json +5 -2
  2. package/.editorconfig +0 -16
  3. package/.env.example +0 -63
  4. package/.gitattributes +0 -1
  5. package/.semgrepignore +0 -19
  6. package/coder-dummy-file.ts +0 -52
  7. package/coder.config.example.json +0 -59
  8. package/coder.config.json +0 -13
  9. package/color_picker.html +0 -36
  10. package/scripts/extract-changelog.js +0 -73
  11. package/scripts/fetch-models.js +0 -143
  12. package/scripts/test.sh +0 -40
  13. package/scripts/update-homebrew-formula.sh +0 -125
  14. package/scripts/update-nix-version.sh +0 -157
  15. package/source/ai-sdk-client/AISDKClient.spec.ts +0 -117
  16. package/source/ai-sdk-client/AISDKClient.ts +0 -155
  17. package/source/ai-sdk-client/chat/chat-handler.spec.ts +0 -121
  18. package/source/ai-sdk-client/chat/chat-handler.ts +0 -276
  19. package/source/ai-sdk-client/chat/streaming-handler.spec.ts +0 -173
  20. package/source/ai-sdk-client/chat/streaming-handler.ts +0 -110
  21. package/source/ai-sdk-client/chat/tool-processor.spec.ts +0 -92
  22. package/source/ai-sdk-client/chat/tool-processor.ts +0 -70
  23. package/source/ai-sdk-client/converters/message-converter.spec.ts +0 -220
  24. package/source/ai-sdk-client/converters/message-converter.ts +0 -113
  25. package/source/ai-sdk-client/converters/tool-converter.spec.ts +0 -90
  26. package/source/ai-sdk-client/converters/tool-converter.ts +0 -46
  27. package/source/ai-sdk-client/error-handling/error-extractor.spec.ts +0 -55
  28. package/source/ai-sdk-client/error-handling/error-extractor.ts +0 -15
  29. package/source/ai-sdk-client/error-handling/error-parser.spec.ts +0 -169
  30. package/source/ai-sdk-client/error-handling/error-parser.ts +0 -161
  31. package/source/ai-sdk-client/index.ts +0 -7
  32. package/source/ai-sdk-client/providers/provider-factory.spec.ts +0 -71
  33. package/source/ai-sdk-client/providers/provider-factory.ts +0 -41
  34. package/source/ai-sdk-client/types.ts +0 -9
  35. package/source/ai-sdk-client-empty-message.spec.ts +0 -141
  36. package/source/ai-sdk-client-error-handling.spec.ts +0 -186
  37. package/source/ai-sdk-client-maxretries.spec.ts +0 -114
  38. package/source/ai-sdk-client-preparestep.spec.ts +0 -279
  39. package/source/app/App.spec.tsx +0 -32
  40. package/source/app/App.tsx +0 -480
  41. package/source/app/components/AppContainer.spec.tsx +0 -96
  42. package/source/app/components/AppContainer.tsx +0 -56
  43. package/source/app/components/ChatInterface.spec.tsx +0 -163
  44. package/source/app/components/ChatInterface.tsx +0 -144
  45. package/source/app/components/ModalSelectors.spec.tsx +0 -141
  46. package/source/app/components/ModalSelectors.tsx +0 -135
  47. package/source/app/helpers.spec.ts +0 -97
  48. package/source/app/helpers.ts +0 -63
  49. package/source/app/index.ts +0 -4
  50. package/source/app/types.ts +0 -39
  51. package/source/app/utils/appUtils.ts +0 -294
  52. package/source/app/utils/conversationState.ts +0 -310
  53. package/source/app.spec.tsx +0 -244
  54. package/source/cli.spec.ts +0 -73
  55. package/source/cli.tsx +0 -51
  56. package/source/client-factory.spec.ts +0 -48
  57. package/source/client-factory.ts +0 -178
  58. package/source/command-parser.spec.ts +0 -127
  59. package/source/command-parser.ts +0 -36
  60. package/source/commands/checkpoint.spec.tsx +0 -277
  61. package/source/commands/checkpoint.tsx +0 -366
  62. package/source/commands/clear.tsx +0 -22
  63. package/source/commands/custom-commands.tsx +0 -121
  64. package/source/commands/exit.ts +0 -21
  65. package/source/commands/export.spec.tsx +0 -131
  66. package/source/commands/export.tsx +0 -79
  67. package/source/commands/help.tsx +0 -120
  68. package/source/commands/index.ts +0 -17
  69. package/source/commands/init.tsx +0 -339
  70. package/source/commands/lsp-command.spec.tsx +0 -281
  71. package/source/commands/lsp.tsx +0 -120
  72. package/source/commands/mcp-command.spec.tsx +0 -313
  73. package/source/commands/mcp.tsx +0 -162
  74. package/source/commands/model-database.spec.tsx +0 -758
  75. package/source/commands/model-database.tsx +0 -418
  76. package/source/commands/model.ts +0 -12
  77. package/source/commands/provider.ts +0 -12
  78. package/source/commands/setup-config.tsx +0 -16
  79. package/source/commands/simple-commands.spec.tsx +0 -175
  80. package/source/commands/status.ts +0 -12
  81. package/source/commands/theme.ts +0 -12
  82. package/source/commands/update.spec.tsx +0 -261
  83. package/source/commands/update.tsx +0 -201
  84. package/source/commands/usage.spec.tsx +0 -495
  85. package/source/commands/usage.tsx +0 -100
  86. package/source/commands.spec.ts +0 -436
  87. package/source/commands.ts +0 -83
  88. package/source/components/assistant-message.spec.tsx +0 -796
  89. package/source/components/assistant-message.tsx +0 -34
  90. package/source/components/bash-execution-indicator.tsx +0 -21
  91. package/source/components/cancelling-indicator.tsx +0 -16
  92. package/source/components/chat-queue.spec.tsx +0 -83
  93. package/source/components/chat-queue.tsx +0 -36
  94. package/source/components/checkpoint-display.spec.tsx +0 -219
  95. package/source/components/checkpoint-display.tsx +0 -126
  96. package/source/components/checkpoint-selector.spec.tsx +0 -173
  97. package/source/components/checkpoint-selector.tsx +0 -173
  98. package/source/components/development-mode-indicator.spec.tsx +0 -268
  99. package/source/components/development-mode-indicator.tsx +0 -38
  100. package/source/components/message-box.spec.tsx +0 -427
  101. package/source/components/message-box.tsx +0 -87
  102. package/source/components/model-selector.tsx +0 -132
  103. package/source/components/provider-selector.tsx +0 -75
  104. package/source/components/random-spinner.tsx +0 -19
  105. package/source/components/security-disclaimer.tsx +0 -73
  106. package/source/components/status-connection-display.spec.tsx +0 -133
  107. package/source/components/status.tsx +0 -267
  108. package/source/components/theme-selector.tsx +0 -126
  109. package/source/components/tool-confirmation.tsx +0 -190
  110. package/source/components/tool-execution-indicator.tsx +0 -33
  111. package/source/components/tool-message.tsx +0 -85
  112. package/source/components/ui/titled-box.spec.tsx +0 -207
  113. package/source/components/ui/titled-box.tsx +0 -57
  114. package/source/components/usage/progress-bar.spec.tsx +0 -398
  115. package/source/components/usage/progress-bar.tsx +0 -30
  116. package/source/components/usage/usage-display.spec.tsx +0 -780
  117. package/source/components/usage/usage-display.tsx +0 -291
  118. package/source/components/user-input.spec.tsx +0 -327
  119. package/source/components/user-input.tsx +0 -533
  120. package/source/components/user-message.spec.tsx +0 -230
  121. package/source/components/user-message.tsx +0 -84
  122. package/source/components/welcome-message.tsx +0 -76
  123. package/source/config/env-substitution.ts +0 -65
  124. package/source/config/index.spec.ts +0 -171
  125. package/source/config/index.ts +0 -154
  126. package/source/config/paths.spec.ts +0 -241
  127. package/source/config/paths.ts +0 -55
  128. package/source/config/preferences.ts +0 -51
  129. package/source/config/themes.ts +0 -315
  130. package/source/constants.ts +0 -130
  131. package/source/context/mode-context.spec.ts +0 -79
  132. package/source/context/mode-context.ts +0 -24
  133. package/source/custom-commands/executor.spec.ts +0 -142
  134. package/source/custom-commands/executor.ts +0 -64
  135. package/source/custom-commands/loader.spec.ts +0 -314
  136. package/source/custom-commands/loader.ts +0 -153
  137. package/source/custom-commands/parser.ts +0 -196
  138. package/source/hooks/chat-handler/conversation/conversation-loop.spec.ts +0 -39
  139. package/source/hooks/chat-handler/conversation/conversation-loop.tsx +0 -511
  140. package/source/hooks/chat-handler/conversation/tool-executor.spec.ts +0 -50
  141. package/source/hooks/chat-handler/conversation/tool-executor.tsx +0 -109
  142. package/source/hooks/chat-handler/index.ts +0 -12
  143. package/source/hooks/chat-handler/state/streaming-state.spec.ts +0 -26
  144. package/source/hooks/chat-handler/state/streaming-state.ts +0 -19
  145. package/source/hooks/chat-handler/types.ts +0 -38
  146. package/source/hooks/chat-handler/useChatHandler.spec.tsx +0 -321
  147. package/source/hooks/chat-handler/useChatHandler.tsx +0 -194
  148. package/source/hooks/chat-handler/utils/context-checker.spec.ts +0 -60
  149. package/source/hooks/chat-handler/utils/context-checker.tsx +0 -73
  150. package/source/hooks/chat-handler/utils/message-helpers.spec.ts +0 -42
  151. package/source/hooks/chat-handler/utils/message-helpers.tsx +0 -36
  152. package/source/hooks/chat-handler/utils/tool-filters.spec.ts +0 -109
  153. package/source/hooks/chat-handler/utils/tool-filters.ts +0 -64
  154. package/source/hooks/useAppHandlers.tsx +0 -291
  155. package/source/hooks/useAppInitialization.tsx +0 -422
  156. package/source/hooks/useAppState.tsx +0 -311
  157. package/source/hooks/useDirectoryTrust.tsx +0 -98
  158. package/source/hooks/useInputState.ts +0 -414
  159. package/source/hooks/useModeHandlers.tsx +0 -302
  160. package/source/hooks/useNonInteractiveMode.ts +0 -140
  161. package/source/hooks/useTerminalWidth.tsx +0 -81
  162. package/source/hooks/useTheme.ts +0 -18
  163. package/source/hooks/useToolHandler.tsx +0 -349
  164. package/source/hooks/useUIState.ts +0 -61
  165. package/source/init/agents-template-generator.ts +0 -421
  166. package/source/init/existing-rules-extractor.ts +0 -319
  167. package/source/init/file-scanner.spec.ts +0 -227
  168. package/source/init/file-scanner.ts +0 -238
  169. package/source/init/framework-detector.ts +0 -382
  170. package/source/init/language-detector.ts +0 -269
  171. package/source/init/project-analyzer.spec.ts +0 -231
  172. package/source/init/project-analyzer.ts +0 -458
  173. package/source/lsp/index.ts +0 -31
  174. package/source/lsp/lsp-client.spec.ts +0 -508
  175. package/source/lsp/lsp-client.ts +0 -487
  176. package/source/lsp/lsp-manager.spec.ts +0 -477
  177. package/source/lsp/lsp-manager.ts +0 -419
  178. package/source/lsp/protocol.spec.ts +0 -502
  179. package/source/lsp/protocol.ts +0 -360
  180. package/source/lsp/server-discovery.spec.ts +0 -654
  181. package/source/lsp/server-discovery.ts +0 -515
  182. package/source/markdown-parser/html-entities.spec.ts +0 -88
  183. package/source/markdown-parser/html-entities.ts +0 -45
  184. package/source/markdown-parser/index.spec.ts +0 -281
  185. package/source/markdown-parser/index.ts +0 -126
  186. package/source/markdown-parser/table-parser.spec.ts +0 -133
  187. package/source/markdown-parser/table-parser.ts +0 -114
  188. package/source/markdown-parser/utils.spec.ts +0 -70
  189. package/source/markdown-parser/utils.ts +0 -13
  190. package/source/mcp/mcp-client.spec.ts +0 -81
  191. package/source/mcp/mcp-client.ts +0 -625
  192. package/source/mcp/transport-factory.spec.ts +0 -406
  193. package/source/mcp/transport-factory.ts +0 -312
  194. package/source/message-handler.ts +0 -67
  195. package/source/model-database/database-engine.spec.ts +0 -494
  196. package/source/model-database/database-engine.ts +0 -50
  197. package/source/model-database/model-database.spec.ts +0 -363
  198. package/source/model-database/model-database.ts +0 -91
  199. package/source/model-database/model-engine.spec.ts +0 -447
  200. package/source/model-database/model-engine.ts +0 -65
  201. package/source/model-database/model-fetcher.spec.ts +0 -583
  202. package/source/model-database/model-fetcher.ts +0 -330
  203. package/source/models/index.ts +0 -1
  204. package/source/models/models-cache.spec.ts +0 -214
  205. package/source/models/models-cache.ts +0 -78
  206. package/source/models/models-dev-client.spec.ts +0 -379
  207. package/source/models/models-dev-client.ts +0 -329
  208. package/source/models/models-types.ts +0 -68
  209. package/source/prompt-history.ts +0 -155
  210. package/source/security/command-injection.spec.ts +0 -240
  211. package/source/services/checkpoint-manager.spec.ts +0 -523
  212. package/source/services/checkpoint-manager.ts +0 -466
  213. package/source/services/file-snapshot.spec.ts +0 -569
  214. package/source/services/file-snapshot.ts +0 -220
  215. package/source/test-utils/render-with-theme.tsx +0 -48
  216. package/source/tokenization/index.ts +0 -1
  217. package/source/tokenization/tokenizer-factory.spec.ts +0 -170
  218. package/source/tokenization/tokenizer-factory.ts +0 -125
  219. package/source/tokenization/tokenizers/anthropic-tokenizer.spec.ts +0 -200
  220. package/source/tokenization/tokenizers/anthropic-tokenizer.ts +0 -43
  221. package/source/tokenization/tokenizers/fallback-tokenizer.spec.ts +0 -236
  222. package/source/tokenization/tokenizers/fallback-tokenizer.ts +0 -26
  223. package/source/tokenization/tokenizers/llama-tokenizer.spec.ts +0 -224
  224. package/source/tokenization/tokenizers/llama-tokenizer.ts +0 -41
  225. package/source/tokenization/tokenizers/openai-tokenizer.spec.ts +0 -184
  226. package/source/tokenization/tokenizers/openai-tokenizer.ts +0 -57
  227. package/source/tool-calling/index.ts +0 -5
  228. package/source/tool-calling/json-parser.spec.ts +0 -639
  229. package/source/tool-calling/json-parser.ts +0 -247
  230. package/source/tool-calling/tool-parser.spec.ts +0 -395
  231. package/source/tool-calling/tool-parser.ts +0 -120
  232. package/source/tool-calling/xml-parser.spec.ts +0 -662
  233. package/source/tool-calling/xml-parser.ts +0 -289
  234. package/source/tools/execute-bash.spec.tsx +0 -353
  235. package/source/tools/execute-bash.tsx +0 -219
  236. package/source/tools/execute-function.spec.ts +0 -130
  237. package/source/tools/fetch-url.spec.tsx +0 -342
  238. package/source/tools/fetch-url.tsx +0 -172
  239. package/source/tools/find-files.spec.tsx +0 -924
  240. package/source/tools/find-files.tsx +0 -293
  241. package/source/tools/index.ts +0 -102
  242. package/source/tools/lsp-get-diagnostics.tsx +0 -192
  243. package/source/tools/needs-approval.spec.ts +0 -282
  244. package/source/tools/read-file.spec.tsx +0 -801
  245. package/source/tools/read-file.tsx +0 -387
  246. package/source/tools/search-file-contents.spec.tsx +0 -1273
  247. package/source/tools/search-file-contents.tsx +0 -293
  248. package/source/tools/string-replace.spec.tsx +0 -730
  249. package/source/tools/string-replace.tsx +0 -548
  250. package/source/tools/tool-manager.ts +0 -210
  251. package/source/tools/tool-registry.spec.ts +0 -415
  252. package/source/tools/tool-registry.ts +0 -228
  253. package/source/tools/web-search.tsx +0 -223
  254. package/source/tools/write-file.spec.tsx +0 -559
  255. package/source/tools/write-file.tsx +0 -228
  256. package/source/types/app.ts +0 -37
  257. package/source/types/checkpoint.ts +0 -48
  258. package/source/types/commands.ts +0 -46
  259. package/source/types/components.ts +0 -27
  260. package/source/types/config.ts +0 -103
  261. package/source/types/core-connection-status.spec.ts +0 -67
  262. package/source/types/core.ts +0 -181
  263. package/source/types/hooks.ts +0 -50
  264. package/source/types/index.ts +0 -12
  265. package/source/types/markdown-parser.ts +0 -11
  266. package/source/types/mcp.ts +0 -52
  267. package/source/types/system.ts +0 -16
  268. package/source/types/tokenization.ts +0 -41
  269. package/source/types/ui.ts +0 -40
  270. package/source/types/usage.ts +0 -58
  271. package/source/types/utils.ts +0 -16
  272. package/source/usage/calculator.spec.ts +0 -385
  273. package/source/usage/calculator.ts +0 -104
  274. package/source/usage/storage.spec.ts +0 -703
  275. package/source/usage/storage.ts +0 -238
  276. package/source/usage/tracker.spec.ts +0 -456
  277. package/source/usage/tracker.ts +0 -102
  278. package/source/utils/atomic-deletion.spec.ts +0 -194
  279. package/source/utils/atomic-deletion.ts +0 -127
  280. package/source/utils/bounded-map.spec.ts +0 -300
  281. package/source/utils/bounded-map.ts +0 -193
  282. package/source/utils/checkpoint-utils.spec.ts +0 -222
  283. package/source/utils/checkpoint-utils.ts +0 -92
  284. package/source/utils/error-formatter.spec.ts +0 -169
  285. package/source/utils/error-formatter.ts +0 -194
  286. package/source/utils/file-autocomplete.spec.ts +0 -173
  287. package/source/utils/file-autocomplete.ts +0 -196
  288. package/source/utils/file-cache.spec.ts +0 -309
  289. package/source/utils/file-cache.ts +0 -195
  290. package/source/utils/file-content-loader.spec.ts +0 -180
  291. package/source/utils/file-content-loader.ts +0 -179
  292. package/source/utils/file-mention-handler.spec.ts +0 -261
  293. package/source/utils/file-mention-handler.ts +0 -84
  294. package/source/utils/file-mention-parser.spec.ts +0 -182
  295. package/source/utils/file-mention-parser.ts +0 -170
  296. package/source/utils/fuzzy-matching.spec.ts +0 -149
  297. package/source/utils/fuzzy-matching.ts +0 -146
  298. package/source/utils/indentation-normalizer.spec.ts +0 -216
  299. package/source/utils/indentation-normalizer.ts +0 -76
  300. package/source/utils/installation-detector.spec.ts +0 -178
  301. package/source/utils/installation-detector.ts +0 -153
  302. package/source/utils/logging/config.spec.ts +0 -311
  303. package/source/utils/logging/config.ts +0 -210
  304. package/source/utils/logging/console-facade.spec.ts +0 -184
  305. package/source/utils/logging/console-facade.ts +0 -384
  306. package/source/utils/logging/correlation.spec.ts +0 -679
  307. package/source/utils/logging/correlation.ts +0 -474
  308. package/source/utils/logging/formatters.spec.ts +0 -464
  309. package/source/utils/logging/formatters.ts +0 -207
  310. package/source/utils/logging/health-monitor/alerts/alert-manager.spec.ts +0 -93
  311. package/source/utils/logging/health-monitor/alerts/alert-manager.ts +0 -79
  312. package/source/utils/logging/health-monitor/checks/configuration-check.spec.ts +0 -56
  313. package/source/utils/logging/health-monitor/checks/configuration-check.ts +0 -43
  314. package/source/utils/logging/health-monitor/checks/logging-check.spec.ts +0 -56
  315. package/source/utils/logging/health-monitor/checks/logging-check.ts +0 -58
  316. package/source/utils/logging/health-monitor/checks/memory-check.spec.ts +0 -100
  317. package/source/utils/logging/health-monitor/checks/memory-check.ts +0 -78
  318. package/source/utils/logging/health-monitor/checks/performance-check.spec.ts +0 -56
  319. package/source/utils/logging/health-monitor/checks/performance-check.ts +0 -56
  320. package/source/utils/logging/health-monitor/checks/request-check.spec.ts +0 -56
  321. package/source/utils/logging/health-monitor/checks/request-check.ts +0 -76
  322. package/source/utils/logging/health-monitor/core/health-check-runner.spec.ts +0 -70
  323. package/source/utils/logging/health-monitor/core/health-check-runner.ts +0 -138
  324. package/source/utils/logging/health-monitor/core/health-monitor.spec.ts +0 -58
  325. package/source/utils/logging/health-monitor/core/health-monitor.ts +0 -344
  326. package/source/utils/logging/health-monitor/core/scoring.spec.ts +0 -65
  327. package/source/utils/logging/health-monitor/core/scoring.ts +0 -91
  328. package/source/utils/logging/health-monitor/index.ts +0 -15
  329. package/source/utils/logging/health-monitor/instances.ts +0 -48
  330. package/source/utils/logging/health-monitor/middleware/http-middleware.spec.ts +0 -141
  331. package/source/utils/logging/health-monitor/middleware/http-middleware.ts +0 -75
  332. package/source/utils/logging/health-monitor/types.ts +0 -126
  333. package/source/utils/logging/index.spec.ts +0 -284
  334. package/source/utils/logging/index.ts +0 -236
  335. package/source/utils/logging/integration.spec.ts +0 -441
  336. package/source/utils/logging/log-method-factory.spec.ts +0 -573
  337. package/source/utils/logging/log-method-factory.ts +0 -233
  338. package/source/utils/logging/log-query/aggregation/aggregator.spec.ts +0 -277
  339. package/source/utils/logging/log-query/aggregation/aggregator.ts +0 -159
  340. package/source/utils/logging/log-query/aggregation/facet-generator.spec.ts +0 -159
  341. package/source/utils/logging/log-query/aggregation/facet-generator.ts +0 -47
  342. package/source/utils/logging/log-query/index.ts +0 -23
  343. package/source/utils/logging/log-query/query/filter-predicates.spec.ts +0 -247
  344. package/source/utils/logging/log-query/query/filter-predicates.ts +0 -154
  345. package/source/utils/logging/log-query/query/query-builder.spec.ts +0 -182
  346. package/source/utils/logging/log-query/query/query-builder.ts +0 -151
  347. package/source/utils/logging/log-query/query/query-engine.spec.ts +0 -214
  348. package/source/utils/logging/log-query/query/query-engine.ts +0 -45
  349. package/source/utils/logging/log-query/storage/circular-buffer.spec.ts +0 -143
  350. package/source/utils/logging/log-query/storage/circular-buffer.ts +0 -75
  351. package/source/utils/logging/log-query/storage/index-manager.spec.ts +0 -150
  352. package/source/utils/logging/log-query/storage/index-manager.ts +0 -71
  353. package/source/utils/logging/log-query/storage/log-storage.spec.ts +0 -257
  354. package/source/utils/logging/log-query/storage/log-storage.ts +0 -80
  355. package/source/utils/logging/log-query/types.ts +0 -163
  356. package/source/utils/logging/log-query/utils/helpers.spec.ts +0 -263
  357. package/source/utils/logging/log-query/utils/helpers.ts +0 -72
  358. package/source/utils/logging/log-query/utils/sorting.spec.ts +0 -182
  359. package/source/utils/logging/log-query/utils/sorting.ts +0 -61
  360. package/source/utils/logging/logger-provider.spec.ts +0 -262
  361. package/source/utils/logging/logger-provider.ts +0 -362
  362. package/source/utils/logging/performance.spec.ts +0 -209
  363. package/source/utils/logging/performance.ts +0 -757
  364. package/source/utils/logging/pino-logger.spec.ts +0 -425
  365. package/source/utils/logging/pino-logger.ts +0 -514
  366. package/source/utils/logging/redaction.spec.ts +0 -490
  367. package/source/utils/logging/redaction.ts +0 -267
  368. package/source/utils/logging/request-tracker.spec.ts +0 -1198
  369. package/source/utils/logging/request-tracker.ts +0 -803
  370. package/source/utils/logging/transports.spec.ts +0 -505
  371. package/source/utils/logging/transports.ts +0 -305
  372. package/source/utils/logging/types.ts +0 -216
  373. package/source/utils/message-builder.spec.ts +0 -179
  374. package/source/utils/message-builder.ts +0 -101
  375. package/source/utils/message-queue.tsx +0 -486
  376. package/source/utils/paste-detection.spec.ts +0 -69
  377. package/source/utils/paste-detection.ts +0 -124
  378. package/source/utils/paste-roundtrip.spec.ts +0 -442
  379. package/source/utils/paste-utils.spec.ts +0 -128
  380. package/source/utils/paste-utils.ts +0 -52
  381. package/source/utils/programming-language-helper.spec.ts +0 -74
  382. package/source/utils/programming-language-helper.ts +0 -32
  383. package/source/utils/prompt-assembly.spec.ts +0 -221
  384. package/source/utils/prompt-processor.ts +0 -173
  385. package/source/utils/tool-args-parser.spec.ts +0 -136
  386. package/source/utils/tool-args-parser.ts +0 -54
  387. package/source/utils/tool-cancellation.spec.ts +0 -230
  388. package/source/utils/tool-cancellation.ts +0 -28
  389. package/source/utils/tool-result-display.spec.tsx +0 -469
  390. package/source/utils/tool-result-display.tsx +0 -90
  391. package/source/utils/update-checker.spec.ts +0 -383
  392. package/source/utils/update-checker.ts +0 -183
  393. package/source/wizard/config-wizard.spec.tsx +0 -103
  394. package/source/wizard/config-wizard.tsx +0 -382
  395. package/source/wizard/steps/location-step.spec.tsx +0 -186
  396. package/source/wizard/steps/location-step.tsx +0 -147
  397. package/source/wizard/steps/mcp-step.spec.tsx +0 -607
  398. package/source/wizard/steps/mcp-step.tsx +0 -632
  399. package/source/wizard/steps/provider-step.spec.tsx +0 -342
  400. package/source/wizard/steps/provider-step.tsx +0 -957
  401. package/source/wizard/steps/summary-step.spec.tsx +0 -749
  402. package/source/wizard/steps/summary-step.tsx +0 -228
  403. package/source/wizard/templates/mcp-templates.spec.ts +0 -613
  404. package/source/wizard/templates/mcp-templates.ts +0 -570
  405. package/source/wizard/templates/provider-templates.spec.ts +0 -152
  406. package/source/wizard/templates/provider-templates.ts +0 -485
  407. package/source/wizard/utils/fetch-cloud-models.spec.ts +0 -428
  408. package/source/wizard/utils/fetch-cloud-models.ts +0 -223
  409. package/source/wizard/utils/fetch-local-models.spec.ts +0 -297
  410. package/source/wizard/utils/fetch-local-models.ts +0 -192
  411. package/source/wizard/validation-array.spec.ts +0 -264
  412. package/source/wizard/validation.spec.ts +0 -373
  413. package/source/wizard/validation.ts +0 -232
@@ -1,224 +0,0 @@
- /**
-  * Tests for llama-tokenizer.ts
-  */
-
- import type {Message} from '@/types/core.js';
- import test from 'ava';
- import {LlamaTokenizer} from './llama-tokenizer.js';
-
- console.log(`\nllama-tokenizer.spec.ts`);
-
- test('LlamaTokenizer encodes simple text', t => {
- 	const tokenizer = new LlamaTokenizer('llama-3-8b');
- 	const count = tokenizer.encode('Hello, world!');
-
- 	// Should return a positive token count
- 	t.true(count > 0);
- 	t.true(count < 10);
- });
-
- test('LlamaTokenizer encodes empty string', t => {
- 	const tokenizer = new LlamaTokenizer('llama-3-8b');
- 	const count = tokenizer.encode('');
-
- 	t.is(count, 0);
- });
-
- test('LlamaTokenizer encodes longer text', t => {
- 	const tokenizer = new LlamaTokenizer('llama-3-8b');
- 	const text =
- 		'This is a longer piece of text that should have more tokens than a simple hello world.';
- 	const count = tokenizer.encode(text);
-
- 	// Should have significantly more tokens
- 	t.true(count > 10);
- 	t.true(count < 50);
- });
-
- test('LlamaTokenizer defaults to llama when no model specified', t => {
- 	const tokenizer = new LlamaTokenizer();
-
- 	t.is(tokenizer.getName(), 'llama-llama');
- });
-
- test('LlamaTokenizer getName returns correct format', t => {
- 	const tokenizer = new LlamaTokenizer('llama-3-70b');
-
- 	t.is(tokenizer.getName(), 'llama-llama-3-70b');
- });
-
- test('LlamaTokenizer countTokens for user message', t => {
- 	const tokenizer = new LlamaTokenizer('llama-3-8b');
- 	const message: Message = {
- 		role: 'user',
- 		content: 'Hello, how are you?',
- 	};
-
- 	const count = tokenizer.countTokens(message);
-
- 	// Should include content tokens + role tokens + overhead
- 	t.true(count > 5);
- 	t.true(count < 25);
- });
-
- test('LlamaTokenizer countTokens for assistant message', t => {
- 	const tokenizer = new LlamaTokenizer('llama-3-8b');
- 	const message: Message = {
- 		role: 'assistant',
- 		content: 'I am doing well, thank you!',
- 	};
-
- 	const count = tokenizer.countTokens(message);
-
- 	t.true(count > 5);
- });
-
- test('LlamaTokenizer countTokens for system message', t => {
- 	const tokenizer = new LlamaTokenizer('llama-3-8b');
- 	const message: Message = {
- 		role: 'system',
- 		content: 'You are a helpful assistant.',
- 	};
-
- 	const count = tokenizer.countTokens(message);
-
- 	t.true(count > 5);
- });
-
- test('LlamaTokenizer countTokens handles empty content', t => {
- 	const tokenizer = new LlamaTokenizer('llama-3-8b');
- 	const message: Message = {
- 		role: 'user',
- 		content: '',
- 	};
-
- 	const count = tokenizer.countTokens(message);
-
- 	// Should still have overhead for role and message structure
- 	t.true(count >= 6);
- });
-
- test('LlamaTokenizer countTokens handles missing content', t => {
- 	const tokenizer = new LlamaTokenizer('llama-3-8b');
- 	const message: Message = {
- 		role: 'user',
- 	} as Message;
-
- 	const count = tokenizer.countTokens(message);
-
- 	// Should handle gracefully
- 	t.true(count >= 0);
- });
-
- test('LlamaTokenizer countTokens includes message overhead', t => {
- 	const tokenizer = new LlamaTokenizer('llama-3-8b');
- 	const shortMessage: Message = {
- 		role: 'user',
- 		content: 'Hi',
- 	};
-
- 	const count = tokenizer.countTokens(shortMessage);
- 	const contentOnly = tokenizer.encode('Hi');
- 	const roleOnly = tokenizer.encode('user');
-
- 	// Total should be more than just content + role due to overhead
- 	t.true(count > contentOnly + roleOnly);
- });
-
- test('LlamaTokenizer handles special characters', t => {
- 	const tokenizer = new LlamaTokenizer('llama-3-8b');
- 	const text = '你好世界 🌍 Привет мир';
- 	const count = tokenizer.encode(text);
-
- 	t.true(count > 0);
- });
-
- test('LlamaTokenizer handles code snippets', t => {
- 	const tokenizer = new LlamaTokenizer('llama-3-8b');
- 	const code = `
- 		function hello() {
- 			console.log("Hello, world!");
- 		}
- 	`;
- 	const count = tokenizer.encode(code);
-
- 	t.true(count > 10);
- });
-
- test('LlamaTokenizer works with mistral model', t => {
- 	const tokenizer = new LlamaTokenizer('mistral-7b');
- 	const count = tokenizer.encode('Hello, world!');
-
- 	t.true(count > 0);
- 	t.is(tokenizer.getName(), 'llama-mistral-7b');
- });
-
- test('LlamaTokenizer works with qwen model', t => {
- 	const tokenizer = new LlamaTokenizer('qwen-2.5');
- 	const count = tokenizer.encode('Hello, world!');
-
- 	t.true(count > 0);
- 	t.is(tokenizer.getName(), 'llama-qwen-2.5');
- });
-
- test('LlamaTokenizer works with codellama model', t => {
- 	const tokenizer = new LlamaTokenizer('codellama-7b');
- 	const count = tokenizer.encode('Hello, world!');
-
- 	t.true(count > 0);
- 	t.is(tokenizer.getName(), 'llama-codellama-7b');
- });
-
- test('LlamaTokenizer handles long messages', t => {
- 	const tokenizer = new LlamaTokenizer('llama-3-8b');
- 	const longText = 'Hello '.repeat(1000);
- 	const message: Message = {
- 		role: 'user',
- 		content: longText,
- 	};
-
- 	const count = tokenizer.countTokens(message);
-
- 	// Should handle long text without crashing
- 	t.true(count > 1000);
- });
-
- test('LlamaTokenizer uses fallback on encoding error', t => {
- 	const tokenizer = new LlamaTokenizer('llama-3-8b');
-
- 	// The fallback should kick in for any edge cases
- 	// Testing with normal text should still work
- 	const count = tokenizer.encode('Normal text');
-
- 	t.true(count > 0);
- });
-
- test('LlamaTokenizer countTokens with tool message', t => {
- 	const tokenizer = new LlamaTokenizer('llama-3-8b');
- 	const message: Message = {
- 		role: 'tool',
- 		content: 'Tool result here',
- 		tool_call_id: '123',
- 	};
-
- 	const count = tokenizer.countTokens(message);
-
- 	// Should handle tool messages
- 	t.true(count > 0);
- });
-
- test('LlamaTokenizer handles deepseek model', t => {
- 	const tokenizer = new LlamaTokenizer('deepseek-coder-33b');
- 	const count = tokenizer.encode('const x = 42;');
-
- 	t.true(count > 0);
- 	t.is(tokenizer.getName(), 'llama-deepseek-coder-33b');
- });
-
- test('LlamaTokenizer handles mixtral model', t => {
- 	const tokenizer = new LlamaTokenizer('mixtral-8x7b');
- 	const count = tokenizer.encode('Hello, world!');
-
- 	t.true(count > 0);
- 	t.is(tokenizer.getName(), 'llama-mixtral-8x7b');
- });
@@ -1,41 +0,0 @@
- /**
-  * Llama tokenizer for local models
-  * Uses llama-tokenizer-js package
-  */
-
- import type {Message} from '@/types/core';
- import llamaTokenizer from 'llama-tokenizer-js';
- import type {Tokenizer} from '../../types/tokenization';
-
- export class LlamaTokenizer implements Tokenizer {
- 	private modelName: string;
-
- 	constructor(modelId?: string) {
- 		this.modelName = modelId || 'llama';
- 	}
-
- 	encode(text: string): number {
- 		try {
- 			const tokens = llamaTokenizer.encode(text);
- 			return tokens.length;
- 		} catch {
- 			// Fallback to character-based estimation if tokenization fails
- 			return Math.ceil(text.length / 4);
- 		}
- 	}
-
- 	countTokens(message: Message): number {
- 		const content = message.content || '';
- 		const role = message.role || '';
-
- 		// Llama format: <|start_header_id|>role<|end_header_id|>content<|eot_id|>
- 		// Approximate overhead for message formatting
- 		const messageOverhead = 6;
-
- 		return this.encode(content) + this.encode(role) + messageOverhead;
- 	}
-
- 	getName(): string {
- 		return `llama-${this.modelName}`;
- 	}
- }
@@ -1,184 +0,0 @@
- /**
-  * Tests for openai-tokenizer.ts
-  */
-
- import type {Message} from '@/types/core.js';
- import test from 'ava';
- import {OpenAITokenizer} from './openai-tokenizer.js';
-
- console.log(`\nopenai-tokenizer.spec.ts`);
-
- test('OpenAITokenizer encodes simple text', t => {
- 	const tokenizer = new OpenAITokenizer('gpt-4');
- 	const count = tokenizer.encode('Hello, world!');
-
- 	// "Hello, world!" should tokenize to around 4 tokens
- 	t.true(count > 0);
- 	t.true(count < 10);
- });
-
- test('OpenAITokenizer encodes empty string', t => {
- 	const tokenizer = new OpenAITokenizer('gpt-4');
- 	const count = tokenizer.encode('');
-
- 	t.is(count, 0);
- });
-
- test('OpenAITokenizer encodes longer text', t => {
- 	const tokenizer = new OpenAITokenizer('gpt-4');
- 	const text =
- 		'This is a longer piece of text that should have more tokens than a simple hello world.';
- 	const count = tokenizer.encode(text);
-
- 	// Should have significantly more tokens
- 	t.true(count > 10);
- 	t.true(count < 50);
- });
-
- test('OpenAITokenizer uses fallback encoding for unsupported model', t => {
- 	const tokenizer = new OpenAITokenizer('unknown-model-xyz');
- 	const count = tokenizer.encode('Hello, world!');
-
- 	// Should still return a count using fallback
- 	t.true(count > 0);
- });
-
- test('OpenAITokenizer defaults to gpt-4 when no model specified', t => {
- 	const tokenizer = new OpenAITokenizer();
-
- 	t.is(tokenizer.getName(), 'openai-gpt-4');
- });
-
- test('OpenAITokenizer getName returns correct format', t => {
- 	const tokenizer = new OpenAITokenizer('gpt-3.5-turbo');
-
- 	t.is(tokenizer.getName(), 'openai-gpt-3.5-turbo');
- });
-
- test('OpenAITokenizer countTokens for user message', t => {
- 	const tokenizer = new OpenAITokenizer('gpt-4');
- 	const message: Message = {
- 		role: 'user',
- 		content: 'Hello, how are you?',
- 	};
-
- 	const count = tokenizer.countTokens(message);
-
- 	// Should include content tokens + role tokens + overhead
- 	t.true(count > 5);
- 	t.true(count < 20);
- });
-
- test('OpenAITokenizer countTokens for assistant message', t => {
- 	const tokenizer = new OpenAITokenizer('gpt-4');
- 	const message: Message = {
- 		role: 'assistant',
- 		content: 'I am doing well, thank you!',
- 	};
-
- 	const count = tokenizer.countTokens(message);
-
- 	t.true(count > 5);
- });
-
- test('OpenAITokenizer countTokens for system message', t => {
- 	const tokenizer = new OpenAITokenizer('gpt-4');
- 	const message: Message = {
- 		role: 'system',
- 		content: 'You are a helpful assistant.',
- 	};
-
- 	const count = tokenizer.countTokens(message);
-
- 	t.true(count > 5);
- });
-
- test('OpenAITokenizer countTokens handles empty content', t => {
- 	const tokenizer = new OpenAITokenizer('gpt-4');
- 	const message: Message = {
- 		role: 'user',
- 		content: '',
- 	};
-
- 	const count = tokenizer.countTokens(message);
-
- 	// Should still have overhead for role and message structure
- 	t.true(count >= 4);
- });
-
- test('OpenAITokenizer countTokens handles missing content', t => {
- 	const tokenizer = new OpenAITokenizer('gpt-4');
- 	const message: Message = {
- 		role: 'user',
- 	} as Message;
-
- 	const count = tokenizer.countTokens(message);
-
- 	// Should handle gracefully
- 	t.true(count >= 0);
- });
-
- test('OpenAITokenizer countTokens includes message overhead', t => {
- 	const tokenizer = new OpenAITokenizer('gpt-4');
- 	const shortMessage: Message = {
- 		role: 'user',
- 		content: 'Hi',
- 	};
-
- 	const count = tokenizer.countTokens(shortMessage);
- 	const contentOnly = tokenizer.encode('Hi');
- 	const roleOnly = tokenizer.encode('user');
-
- 	// Total should be more than just content + role due to overhead
- 	t.true(count > contentOnly + roleOnly);
- });
-
- test('OpenAITokenizer free method exists', t => {
- 	const tokenizer = new OpenAITokenizer('gpt-4');
-
- 	t.notThrows(() => {
- 		tokenizer.free();
- 	});
- });
-
- test('OpenAITokenizer handles special characters', t => {
- 	const tokenizer = new OpenAITokenizer('gpt-4');
- 	const text = '你好世界 🌍 Привет мир';
- 	const count = tokenizer.encode(text);
-
- 	t.true(count > 0);
- });
-
- test('OpenAITokenizer handles code snippets', t => {
- 	const tokenizer = new OpenAITokenizer('gpt-4');
- 	const code = `
- 		function hello() {
- 			console.log("Hello, world!");
- 		}
- 	`;
- 	const count = tokenizer.encode(code);
-
- 	t.true(count > 10);
- });
-
- test('OpenAITokenizer works with gpt-3.5-turbo model', t => {
- 	const tokenizer = new OpenAITokenizer('gpt-3.5-turbo');
- 	const count = tokenizer.encode('Hello, world!');
-
- 	t.true(count > 0);
- 	t.is(tokenizer.getName(), 'openai-gpt-3.5-turbo');
- });
-
- test('OpenAITokenizer handles long messages', t => {
- 	const tokenizer = new OpenAITokenizer('gpt-4');
- 	const longText = 'Hello '.repeat(1000);
- 	const message: Message = {
- 		role: 'user',
- 		content: longText,
- 	};
-
- 	const count = tokenizer.countTokens(message);
-
- 	// Should handle long text without crashing
- 	t.true(count > 1000);
- });
@@ -1,57 +0,0 @@
- /**
-  * OpenAI tokenizer using tiktoken
-  * Supports GPT-3.5, GPT-4, and other OpenAI models
-  */
-
- import type {Message} from '@/types/core';
- import {type TiktokenModel, encoding_for_model, get_encoding} from 'tiktoken';
- import type {Tokenizer} from '../../types/tokenization';
-
- /**
-  * OpenAI tokenizer using tiktoken for accurate token counting
-  */
- export class OpenAITokenizer implements Tokenizer {
- 	private encoding: ReturnType<typeof get_encoding>;
- 	private modelName: string;
-
- 	constructor(modelId?: string) {
- 		this.modelName = modelId || 'gpt-4';
-
- 		try {
- 			this.encoding = encoding_for_model(modelId as TiktokenModel);
- 		} catch {
- 			this.encoding = get_encoding('cl100k_base');
- 		}
- 	}
-
- 	encode(text: string): number {
- 		try {
- 			const tokens = this.encoding.encode(text);
- 			return tokens.length;
- 		} catch {
- 			return Math.ceil(text.length / 4);
- 		}
- 	}
-
- 	countTokens(message: Message): number {
- 		const content = message.content || '';
- 		const role = message.role || '';
-
- 		// OpenAI format: each message has overhead for role markers
- 		// <|im_start|>role\ncontent<|im_end|>
- 		const messageOverhead = 4; // Approximate overhead per message
-
- 		return this.encode(content) + this.encode(role) + messageOverhead;
- 	}
-
- 	getName(): string {
- 		return `openai-${this.modelName}`;
- 	}
-
- 	/**
- 	 * Clean up encoding resources
- 	 */
- 	free(): void {
- 		this.encoding.free();
- 	}
- }
@@ -1,5 +0,0 @@
- /**
-  * Tool calling utilities - main exports
-  */
-
- export {parseToolCalls} from '@/tool-calling/tool-parser';