@harperfast/agent 0.15.10 → 0.16.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/dist/agent.js +184 -44
  2. package/package.json +1 -1
package/dist/agent.js CHANGED
@@ -296,8 +296,8 @@ var defaultAnthropicModel = "claude-4-6-opus-latest";
296
296
  var defaultAnthropicCompactionModel = "claude-4-5-haiku-latest";
297
297
  var defaultGoogleModel = "gemini-3-pro";
298
298
  var defaultGoogleCompactionModel = "gemini-2.5-flash-lite";
299
- var defaultOllamaModel = "ollama-qwen3-coder:30b";
300
- var defaultOllamaCompactionModel = "ollama-qwen2.5-coder";
299
+ var defaultOllamaModel = "ollama-qwen3.5";
300
+ var defaultOllamaCompactionModel = "ollama-qwen3.5:2b";
301
301
  var defaultModels = [
302
302
  defaultOpenAIModel,
303
303
  defaultAnthropicModel,
@@ -311,12 +311,45 @@ var defaultCompactionModels = [
311
311
  defaultOllamaCompactionModel
312
312
  ];
313
313
 
314
+ // utils/ollama/normalizeOllamaBaseUrl.ts
315
+ function normalizeOllamaBaseUrl(baseUrl) {
316
+ let url = baseUrl.trim();
317
+ if (!url.startsWith("http://") && !url.startsWith("https://")) {
318
+ url = `http://${url}`;
319
+ }
320
+ const urlObj = new URL(url);
321
+ if (!urlObj.port) {
322
+ urlObj.port = "11434";
323
+ }
324
+ let pathname = urlObj.pathname;
325
+ if (pathname.endsWith("/")) {
326
+ pathname = pathname.slice(0, -1);
327
+ }
328
+ if (!pathname.endsWith("/api")) {
329
+ pathname += "/api";
330
+ }
331
+ urlObj.pathname = pathname;
332
+ return urlObj.toString().replace(/\/$/, "");
333
+ }
334
+
314
335
  // lifecycle/getModel.ts
315
336
  function isOpenAIModel(modelName) {
316
337
  if (!modelName || modelName === defaultOpenAIModel) {
317
338
  return true;
318
339
  }
319
- return !modelName.startsWith("claude-") && !modelName.startsWith("gemini-") && !modelName.startsWith("ollama-");
340
+ return !modelName.startsWith("claude-") && !modelName.startsWith("gemini-") && !modelName.startsWith("ollama-") && !modelName.includes(":");
341
+ }
342
+ function getProvider(modelName) {
343
+ if (modelName.startsWith("claude-")) {
344
+ return "Anthropic";
345
+ }
346
+ if (modelName.startsWith("gemini-")) {
347
+ return "Google";
348
+ }
349
+ if (modelName.startsWith("ollama-") || modelName.includes(":")) {
350
+ return "Ollama";
351
+ }
352
+ return "OpenAI";
320
353
  }
321
354
  function getModel(modelName) {
322
355
  if (modelName.startsWith("claude-")) {
@@ -325,9 +358,9 @@ function getModel(modelName) {
325
358
  if (modelName.startsWith("gemini-")) {
326
359
  return aisdk(google(modelName));
327
360
  }
328
- if (modelName.startsWith("ollama-")) {
361
+ if (modelName.startsWith("ollama-") || modelName.includes(":")) {
329
362
  const ollamaBaseUrl = process.env.OLLAMA_BASE_URL ? normalizeOllamaBaseUrl(process.env.OLLAMA_BASE_URL) : void 0;
330
- const ollamaProvider = ollamaBaseUrl ? createOllama({ baseURL: ollamaBaseUrl }) : ollama;
363
+ const ollamaProvider = ollamaBaseUrl ? createOllama({ baseURL: ollamaBaseUrl, compatibility: "strict" }) : ollama;
331
364
  return aisdk(ollamaProvider(getModelName(modelName)));
332
365
  }
333
366
  return aisdk(openai(modelName));
@@ -340,29 +373,10 @@ function getModelName(modelName) {
340
373
  return modelName;
341
374
  }
342
375
  if (modelName.startsWith("ollama-")) {
343
- return modelName.replace("ollama-", "");
376
+ return modelName.slice(7);
344
377
  }
345
378
  return modelName;
346
379
  }
347
- function normalizeOllamaBaseUrl(baseUrl) {
348
- let url = baseUrl.trim();
349
- if (!url.startsWith("http://") && !url.startsWith("https://")) {
350
- url = `http://${url}`;
351
- }
352
- const urlObj = new URL(url);
353
- if (!urlObj.port) {
354
- urlObj.port = "11434";
355
- }
356
- let pathname = urlObj.pathname;
357
- if (pathname.endsWith("/")) {
358
- pathname = pathname.slice(0, -1);
359
- }
360
- if (!pathname.endsWith("/api")) {
361
- pathname += "/api";
362
- }
363
- urlObj.pathname = pathname;
364
- return urlObj.toString().replace(/\/$/, "");
365
- }
366
380
 
367
381
  // lifecycle/handleExit.ts
368
382
  import { getGlobalTraceProvider } from "@openai/agents";
@@ -2464,6 +2478,66 @@ function createTools() {
2464
2478
  ];
2465
2479
  }
2466
2480
 
2481
+ // utils/ollama/fetchOllamaModels.ts
2482
+ async function fetchOllamaModels() {
2483
+ const ollamaBaseUrl = process.env.OLLAMA_BASE_URL ? normalizeOllamaBaseUrl(process.env.OLLAMA_BASE_URL) : "http://localhost:11434/api";
2484
+ try {
2485
+ const response = await fetch(`${ollamaBaseUrl}/tags`);
2486
+ if (!response.ok) {
2487
+ return [];
2488
+ }
2489
+ const data = await response.json();
2490
+ return data.models.map((m) => m.name);
2491
+ } catch {
2492
+ return [];
2493
+ }
2494
+ }
2495
+
2496
+ // utils/ollama/pullOllamaModel.ts
2497
+ async function pullOllamaModel(modelName, onProgress) {
2498
+ const ollamaBaseUrl = process.env.OLLAMA_BASE_URL ? normalizeOllamaBaseUrl(process.env.OLLAMA_BASE_URL) : "http://localhost:11434/api";
2499
+ const response = await fetch(`${ollamaBaseUrl}/pull`, {
2500
+ method: "POST",
2501
+ body: JSON.stringify({ name: modelName, stream: true })
2502
+ });
2503
+ if (!response.ok) {
2504
+ throw new Error(`Failed to pull Ollama model ${modelName}: ${response.statusText}`);
2505
+ }
2506
+ if (!response.body) {
2507
+ throw new Error(`Failed to pull Ollama model ${modelName}: No response body`);
2508
+ }
2509
+ const reader = response.body.getReader();
2510
+ const decoder = new TextDecoder();
2511
+ while (true) {
2512
+ const { done, value } = await reader.read();
2513
+ if (done) {
2514
+ break;
2515
+ }
2516
+ const chunk = decoder.decode(value, { stream: true });
2517
+ const lines = chunk.split("\n");
2518
+ for (const line of lines) {
2519
+ if (!line.trim()) {
2520
+ continue;
2521
+ }
2522
+ try {
2523
+ const json = JSON.parse(line);
2524
+ if (onProgress) {
2525
+ onProgress(json);
2526
+ }
2527
+ } catch {
2528
+ }
2529
+ }
2530
+ }
2531
+ }
2532
+
2533
+ // utils/ollama/ensureOllamaModel.ts
2534
+ async function ensureOllamaModel(modelName, onProgress) {
2535
+ const models = await fetchOllamaModels();
2536
+ if (!models.includes(modelName)) {
2537
+ await pullOllamaModel(modelName, onProgress);
2538
+ }
2539
+ }
2540
+
2467
2541
  // utils/sessions/createSession.ts
2468
2542
  import { MemorySession as MemorySession3 } from "@openai/agents";
2469
2543
 
@@ -2843,6 +2917,18 @@ async function compactConversation(items) {
2843
2917
  let noticeContent = "... conversation history compacted ...";
2844
2918
  if (trackedState.compactionModel && itemsToCompact.length > 0) {
2845
2919
  try {
2920
+ if (getProvider(trackedState.compactionModel) === "Ollama") {
2921
+ const modelName = getModelName(trackedState.compactionModel);
2922
+ await ensureOllamaModel(modelName, (progress) => {
2923
+ emitToListeners("SetPulling", {
2924
+ modelName,
2925
+ status: progress.status,
2926
+ completed: progress.completed ?? 0,
2927
+ total: progress.total ?? 0
2928
+ });
2929
+ });
2930
+ emitToListeners("SetPulling", null);
2931
+ }
2846
2932
  const agent = new Agent({
2847
2933
  name: "History Compactor",
2848
2934
  model: isOpenAIModel(trackedState.compactionModel) ? trackedState.compactionModel : getModel(trackedState.compactionModel),
@@ -3871,6 +3957,23 @@ var AgentManager = class {
3871
3957
  async runTask(task, isPrompt) {
3872
3958
  this.controller = new AbortController();
3873
3959
  await this.runCompactionIfWeWereIdle();
3960
+ if (getProvider(trackedState.model) === "Ollama") {
3961
+ try {
3962
+ const modelName = getModelName(trackedState.model);
3963
+ await ensureOllamaModel(modelName, (progress) => {
3964
+ emitToListeners("SetPulling", {
3965
+ modelName,
3966
+ status: progress.status,
3967
+ completed: progress.completed ?? 0,
3968
+ total: progress.total ?? 0
3969
+ });
3970
+ });
3971
+ emitToListeners("SetPulling", null);
3972
+ } catch (err) {
3973
+ emitToListeners("SetPulling", null);
3974
+ logError(err);
3975
+ }
3976
+ }
3874
3977
  emitToListeners("SetThinking", true);
3875
3978
  let taskOrState = task;
3876
3979
  const lowerTask = task.toLowerCase();
@@ -3990,6 +4093,7 @@ var ChatProvider = ({
3990
4093
  const [userInputMode, setUserInputMode] = useState3("waiting");
3991
4094
  const [isThinking, setIsThinking] = useState3(false);
3992
4095
  const [isCompacting, setIsCompacting] = useState3(false);
4096
+ const [pullingState, setPullingState] = useState3(null);
3993
4097
  const [focusedArea, setFocusedArea] = useState3("input");
3994
4098
  useListener("PushNewMessages", (messages2) => {
3995
4099
  setMessages((prev) => {
@@ -4007,6 +4111,9 @@ var ChatProvider = ({
4007
4111
  useListener("SetCompacting", (value2) => {
4008
4112
  setIsCompacting(Boolean(value2));
4009
4113
  }, []);
4114
+ useListener("SetPulling", (value2) => {
4115
+ setPullingState(value2);
4116
+ }, []);
4010
4117
  useListener("InterruptThought", () => {
4011
4118
  setIsThinking(false);
4012
4119
  }, []);
@@ -4044,9 +4151,10 @@ var ChatProvider = ({
4044
4151
  userInputMode,
4045
4152
  isThinking,
4046
4153
  isCompacting,
4154
+ pullingState,
4047
4155
  focusedArea,
4048
4156
  setFocusedArea
4049
- }), [messages, userInputMode, isThinking, isCompacting, focusedArea]);
4157
+ }), [messages, userInputMode, isThinking, isCompacting, pullingState, focusedArea]);
4050
4158
  return /* @__PURE__ */ jsx3(ChatContext.Provider, { value, children });
4051
4159
  };
4052
4160
 
@@ -5330,7 +5438,7 @@ function calculatePlaceholder(mode) {
5330
5438
  // ink/components/ChatContent.tsx
5331
5439
  import { jsx as jsx17, jsxs as jsxs9 } from "react/jsx-runtime";
5332
5440
  function ChatContent() {
5333
- const { messages, isThinking, isCompacting, focusedArea, setFocusedArea } = useChat();
5441
+ const { messages, isThinking, isCompacting, pullingState, focusedArea, setFocusedArea } = useChat();
5334
5442
  const { payload } = useApproval();
5335
5443
  const size = useTerminalSize();
5336
5444
  useMessageListener();
@@ -5426,6 +5534,7 @@ function ChatContent() {
5426
5534
  return 6;
5427
5535
  }, []);
5428
5536
  const availableTextWidth = timelineWidth - 4;
5537
+ const pullingHeight = pullingState ? 1 : 0;
5429
5538
  const lineItems = useMemo11(() => {
5430
5539
  const acc = [];
5431
5540
  for (const msg of messages) {
@@ -5512,7 +5621,7 @@ function ChatContent() {
5512
5621
  ], []);
5513
5622
  const timelineTitle = "TIMELINE:";
5514
5623
  const timelineHeaderWidth = timelineWidth - 1;
5515
- const showSpinner = isCompacting || isThinking;
5624
+ const showSpinner = isCompacting || isThinking || Boolean(pullingState);
5516
5625
  const timelineDashes = timelineHeaderWidth - timelineTitle.length - (showSpinner ? 5 : 0);
5517
5626
  const tabsTotalWidth = tabs.reduce((acc, t) => acc + t.label.length + 2, 0) + (tabs.length - 1);
5518
5627
  const statusDashes = Math.max(0, statusWidth - tabsTotalWidth - 2);
@@ -5549,7 +5658,7 @@ function ChatContent() {
5549
5658
  "\u256E"
5550
5659
  ] })
5551
5660
  ] }),
5552
- /* @__PURE__ */ jsxs9(Box10, { flexDirection: "row", height: contentHeight - 2, children: [
5661
+ /* @__PURE__ */ jsxs9(Box10, { flexDirection: "row", height: contentHeight - 2 - pullingHeight, children: [
5553
5662
  /* @__PURE__ */ jsx17(
5554
5663
  Box10,
5555
5664
  {
@@ -5562,7 +5671,7 @@ function ChatContent() {
5562
5671
  {
5563
5672
  items: lineItems,
5564
5673
  itemHeight: 1,
5565
- height: contentHeight - 2,
5674
+ height: contentHeight - 2 - pullingHeight,
5566
5675
  selectedIndex,
5567
5676
  renderOverflowTop: useCallback5((count) => /* @__PURE__ */ jsxs9(Box10, { children: [
5568
5677
  /* @__PURE__ */ jsx17(Text10, { color: "gray", dimColor: true, children: "\u2502" }),
@@ -5630,17 +5739,36 @@ function ChatContent() {
5630
5739
  /* @__PURE__ */ jsx17(Box10, { marginTop: 1, children: /* @__PURE__ */ jsx17(SettingsView, { isDense: true }) })
5631
5740
  ] }),
5632
5741
  activeTab === "planDescription" && /* @__PURE__ */ jsx17(PlanView, {}),
5633
- activeTab === "actions" && /* @__PURE__ */ jsx17(ActionsView, { height: contentHeight - 2, isFocused: focusedArea === "status" })
5742
+ activeTab === "actions" && /* @__PURE__ */ jsx17(
5743
+ ActionsView,
5744
+ {
5745
+ height: contentHeight - 2 - pullingHeight,
5746
+ isFocused: focusedArea === "status"
5747
+ }
5748
+ )
5634
5749
  ] })
5635
5750
  }
5636
5751
  )
5637
5752
  ] }),
5638
- /* @__PURE__ */ jsxs9(Box10, { flexDirection: "row", height: 1, children: [
5639
- /* @__PURE__ */ jsx17(Text10, { color: junctionLeftColor, children: "\u2522" }),
5640
- /* @__PURE__ */ jsx17(Text10, { color: timelineBottomColor, children: "\u2501".repeat(timelineWidth - 1) }),
5641
- /* @__PURE__ */ jsx17(Text10, { color: junctionMiddleColor, children: "\u2537" }),
5642
- /* @__PURE__ */ jsx17(Text10, { color: statusBottomColor, children: "\u2501".repeat(Math.max(0, statusWidth - 2)) }),
5643
- /* @__PURE__ */ jsx17(Text10, { color: junctionRightColor, children: "\u252A" })
5753
+ /* @__PURE__ */ jsxs9(Box10, { flexDirection: "column", children: [
5754
+ pullingState && /* @__PURE__ */ jsx17(Box10, { paddingLeft: 2, paddingRight: 2, marginBottom: 0, children: /* @__PURE__ */ jsxs9(Text10, { color: "yellow", children: [
5755
+ `\uF019 Downloading `,
5756
+ /* @__PURE__ */ jsx17(Text10, { bold: true, children: pullingState.modelName }),
5757
+ ` from Ollama... `,
5758
+ /* @__PURE__ */ jsx17(Text10, { dimColor: true, children: pullingState.status === "pulling manifest" ? "initializing" : pullingState.status }),
5759
+ pullingState.total > 0 && /* @__PURE__ */ jsxs9(Text10, { children: [
5760
+ ` [${"=".repeat(Math.floor(pullingState.completed / pullingState.total * 20))}${" ".repeat(20 - Math.floor(pullingState.completed / pullingState.total * 20))}] `,
5761
+ Math.round(pullingState.completed / pullingState.total * 100),
5762
+ "%"
5763
+ ] })
5764
+ ] }) }),
5765
+ /* @__PURE__ */ jsxs9(Box10, { flexDirection: "row", height: 1, children: [
5766
+ /* @__PURE__ */ jsx17(Text10, { color: junctionLeftColor, children: "\u2522" }),
5767
+ /* @__PURE__ */ jsx17(Text10, { color: timelineBottomColor, children: "\u2501".repeat(timelineWidth - 1) }),
5768
+ /* @__PURE__ */ jsx17(Text10, { color: junctionMiddleColor, children: "\u2537" }),
5769
+ /* @__PURE__ */ jsx17(Text10, { color: statusBottomColor, children: "\u2501".repeat(Math.max(0, statusWidth - 2)) }),
5770
+ /* @__PURE__ */ jsx17(Text10, { color: junctionRightColor, children: "\u252A" })
5771
+ ] })
5644
5772
  ] }),
5645
5773
  /* @__PURE__ */ jsx17(UserInput, {})
5646
5774
  ] });
@@ -5845,7 +5973,7 @@ function DiffApprovalView() {
5845
5973
  // ink/configurationWizard/ConfigurationWizard.tsx
5846
5974
  import { Box as Box18, useInput as useInput10 } from "ink";
5847
5975
  import { Step, Stepper } from "ink-stepper";
5848
- import { useState as useState17 } from "react";
5976
+ import { useEffect as useEffect14, useState as useState17 } from "react";
5849
5977
 
5850
5978
  // utils/files/getEnvVarForProvider.ts
5851
5979
  function getEnvVarForProvider(provider) {
@@ -6040,7 +6168,7 @@ var modelsByProvider = {
6040
6168
  OpenAI: [defaultOpenAIModel, "gpt-5.0", defaultOpenAICompactionModel],
6041
6169
  Anthropic: [defaultAnthropicModel, "claude-4-5-sonnet-latest", defaultAnthropicCompactionModel],
6042
6170
  Google: [defaultGoogleModel, "gemini-3-flash", "gemini-2.5-flash", defaultGoogleCompactionModel],
6043
- Ollama: [defaultOllamaModel, "mistral", defaultOllamaCompactionModel]
6171
+ Ollama: [defaultOllamaModel, "ollama-qwen3.5:27b", defaultOllamaCompactionModel]
6044
6172
  };
6045
6173
  var compactorModelsByProvider = {
6046
6174
  OpenAI: modelsByProvider.OpenAI.slice().reverse(),
@@ -6194,13 +6322,23 @@ function StepperProgress({ steps, currentStep }) {
6194
6322
  import { jsx as jsx25, jsxs as jsxs17 } from "react/jsx-runtime";
6195
6323
  function ConfigurationWizard({ onComplete }) {
6196
6324
  const [provider, setProvider] = useState17("OpenAI");
6325
+ const [ollamaModels, setOllamaModels] = useState17([]);
6326
+ useEffect14(() => {
6327
+ if (provider === "Ollama") {
6328
+ fetchOllamaModels().then((models2) => {
6329
+ if (models2.length > 0) {
6330
+ setOllamaModels(models2);
6331
+ }
6332
+ });
6333
+ }
6334
+ }, [provider]);
6197
6335
  useInput10((input, key) => {
6198
6336
  if (key.ctrl && input === "x") {
6199
6337
  emitToListeners("ExitUI", void 0);
6200
6338
  }
6201
6339
  });
6202
- const models = modelsByProvider[provider];
6203
- const compactorModels = compactorModelsByProvider[provider];
6340
+ const models = provider === "Ollama" && ollamaModels.length > 0 ? [.../* @__PURE__ */ new Set([...ollamaModels, ...modelsByProvider[provider]])] : modelsByProvider[provider];
6341
+ const compactorModels = provider === "Ollama" && ollamaModels.length > 0 ? [.../* @__PURE__ */ new Set([...ollamaModels, ...compactorModelsByProvider[provider]])] : compactorModelsByProvider[provider];
6204
6342
  return /* @__PURE__ */ jsx25(Box18, { flexDirection: "column", padding: 1, minHeight: 10, children: /* @__PURE__ */ jsxs17(
6205
6343
  Stepper,
6206
6344
  {
@@ -6245,7 +6383,8 @@ function ConfigurationWizard({ onComplete }) {
6245
6383
  title: "What model would you like to use?",
6246
6384
  models,
6247
6385
  onConfirm: (m) => {
6248
- updateEnv("HARPER_AGENT_MODEL", m);
6386
+ const finalModelName = provider === "Ollama" && !m.startsWith("ollama-") && !m.includes(":") ? `ollama-${m}` : m;
6387
+ updateEnv("HARPER_AGENT_MODEL", finalModelName);
6249
6388
  goNext();
6250
6389
  },
6251
6390
  onBack: goBack
@@ -6257,7 +6396,8 @@ function ConfigurationWizard({ onComplete }) {
6257
6396
  title: "What model should we use for memory compaction?",
6258
6397
  models: compactorModels,
6259
6398
  onConfirm: (m) => {
6260
- updateEnv("HARPER_AGENT_COMPACTION_MODEL", m);
6399
+ const finalModelName = provider === "Ollama" && !m.startsWith("ollama-") && !m.includes(":") ? `ollama-${m}` : m;
6400
+ updateEnv("HARPER_AGENT_COMPACTION_MODEL", finalModelName);
6261
6401
  goNext();
6262
6402
  },
6263
6403
  onBack: goBack
@@ -6709,7 +6849,7 @@ function ensureApiKey() {
6709
6849
  requiredEnvVars.add("ANTHROPIC_API_KEY");
6710
6850
  } else if (model.startsWith("gemini-")) {
6711
6851
  requiredEnvVars.add("GOOGLE_GENERATIVE_AI_API_KEY");
6712
- } else if (model.startsWith("ollama-")) {
6852
+ } else if (model.startsWith("ollama-") || model.includes(":")) {
6713
6853
  } else {
6714
6854
  requiredEnvVars.add("OPENAI_API_KEY");
6715
6855
  }
package/package.json CHANGED
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "name": "@harperfast/agent",
3
3
  "description": "AI to help you with Harper app management",
4
- "version": "0.15.10",
4
+ "version": "0.16.0",
5
5
  "main": "dist/agent.js",
6
6
  "repository": "github:HarperFast/harper-agent",
7
7
  "bugs": {