@harperfast/agent 0.15.10 → 0.16.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/dist/agent.js +217 -53
  2. package/package.json +1 -1
package/dist/agent.js CHANGED
@@ -296,8 +296,8 @@ var defaultAnthropicModel = "claude-4-6-opus-latest";
296
296
  var defaultAnthropicCompactionModel = "claude-4-5-haiku-latest";
297
297
  var defaultGoogleModel = "gemini-3-pro";
298
298
  var defaultGoogleCompactionModel = "gemini-2.5-flash-lite";
299
- var defaultOllamaModel = "ollama-qwen3-coder:30b";
300
- var defaultOllamaCompactionModel = "ollama-qwen2.5-coder";
299
+ var defaultOllamaModel = "ollama-qwen3.5";
300
+ var defaultOllamaCompactionModel = "ollama-qwen3.5:2b";
301
301
  var defaultModels = [
302
302
  defaultOpenAIModel,
303
303
  defaultAnthropicModel,
@@ -311,12 +311,45 @@ var defaultCompactionModels = [
311
311
  defaultOllamaCompactionModel
312
312
  ];
313
313
 
314
+ // utils/ollama/normalizeOllamaBaseUrl.ts
315
+ function normalizeOllamaBaseUrl(baseUrl) {
316
+ let url = baseUrl.trim();
317
+ if (!url.startsWith("http://") && !url.startsWith("https://")) {
318
+ url = `http://${url}`;
319
+ }
320
+ const urlObj = new URL(url);
321
+ if (!urlObj.port) {
322
+ urlObj.port = "11434";
323
+ }
324
+ let pathname = urlObj.pathname;
325
+ if (pathname.endsWith("/")) {
326
+ pathname = pathname.slice(0, -1);
327
+ }
328
+ if (!pathname.endsWith("/api")) {
329
+ pathname += "/api";
330
+ }
331
+ urlObj.pathname = pathname;
332
+ return urlObj.toString().replace(/\/$/, "");
333
+ }
334
+
314
335
  // lifecycle/getModel.ts
315
336
  function isOpenAIModel(modelName) {
316
337
  if (!modelName || modelName === defaultOpenAIModel) {
317
338
  return true;
318
339
  }
319
- return !modelName.startsWith("claude-") && !modelName.startsWith("gemini-") && !modelName.startsWith("ollama-");
340
+ return !modelName.startsWith("claude-") && !modelName.startsWith("gemini-") && !modelName.startsWith("ollama-") && !modelName.includes(":");
341
+ }
342
+ function getProvider(modelName) {
343
+ if (modelName.startsWith("claude-")) {
344
+ return "Anthropic";
345
+ }
346
+ if (modelName.startsWith("gemini-")) {
347
+ return "Google";
348
+ }
349
+ if (modelName.startsWith("ollama-") || modelName.includes(":")) {
350
+ return "Ollama";
351
+ }
352
+ return "OpenAI";
320
353
  }
321
354
  function getModel(modelName) {
322
355
  if (modelName.startsWith("claude-")) {
@@ -325,9 +358,9 @@ function getModel(modelName) {
325
358
  if (modelName.startsWith("gemini-")) {
326
359
  return aisdk(google(modelName));
327
360
  }
328
- if (modelName.startsWith("ollama-")) {
361
+ if (modelName.startsWith("ollama-") || modelName.includes(":")) {
329
362
  const ollamaBaseUrl = process.env.OLLAMA_BASE_URL ? normalizeOllamaBaseUrl(process.env.OLLAMA_BASE_URL) : void 0;
330
- const ollamaProvider = ollamaBaseUrl ? createOllama({ baseURL: ollamaBaseUrl }) : ollama;
363
+ const ollamaProvider = ollamaBaseUrl ? createOllama({ baseURL: ollamaBaseUrl, compatibility: "strict" }) : ollama;
331
364
  return aisdk(ollamaProvider(getModelName(modelName)));
332
365
  }
333
366
  return aisdk(openai(modelName));
@@ -340,29 +373,10 @@ function getModelName(modelName) {
340
373
  return modelName;
341
374
  }
342
375
  if (modelName.startsWith("ollama-")) {
343
- return modelName.replace("ollama-", "");
376
+ return modelName.slice(7);
344
377
  }
345
378
  return modelName;
346
379
  }
347
- function normalizeOllamaBaseUrl(baseUrl) {
348
- let url = baseUrl.trim();
349
- if (!url.startsWith("http://") && !url.startsWith("https://")) {
350
- url = `http://${url}`;
351
- }
352
- const urlObj = new URL(url);
353
- if (!urlObj.port) {
354
- urlObj.port = "11434";
355
- }
356
- let pathname = urlObj.pathname;
357
- if (pathname.endsWith("/")) {
358
- pathname = pathname.slice(0, -1);
359
- }
360
- if (!pathname.endsWith("/api")) {
361
- pathname += "/api";
362
- }
363
- urlObj.pathname = pathname;
364
- return urlObj.toString().replace(/\/$/, "");
365
- }
366
380
 
367
381
  // lifecycle/handleExit.ts
368
382
  import { getGlobalTraceProvider } from "@openai/agents";
@@ -1134,7 +1148,23 @@ async function execute11(operation) {
1134
1148
  if (needed) {
1135
1149
  const content = await execute10({ skill: needed });
1136
1150
  console.error(`Understanding ${needed} is necessary before applying this patch.`);
1137
- return { status: "failed, skill guarded", output: content };
1151
+ return {
1152
+ status: "skill_required",
1153
+ output: `apply_patch blocked: you must understand the "${needed}" skill before writing to "${operation.path}".
1154
+
1155
+ REQUIRED NEXT STEP: Call apply_patch again with the EXACT same arguments:
1156
+ type="${operation.type}", path="${operation.path}", diff=<same diff as before>
1157
+
1158
+ PROHIBITED \u2014 do NOT attempt any of the following to work around this:
1159
+ - Use the shell tool to write or create files
1160
+ - Call set_patch_auto_approve or set_shell_auto_approve
1161
+ - Use any other workaround
1162
+
1163
+ Read the skill content below, then retry apply_patch immediately.
1164
+
1165
+ --- Skill: ${needed} ---
1166
+ ${content}`
1167
+ };
1138
1168
  }
1139
1169
  switch (operation.type) {
1140
1170
  case "create_file":
@@ -1165,7 +1195,12 @@ async function execute11(operation) {
1165
1195
  function createApplyPatchTool() {
1166
1196
  return tool11({
1167
1197
  name: "apply_patch",
1168
- description: "Applies a patch (create, update, or delete a file) to the workspace.",
1198
+ description: `Applies a patch (create, update, or delete a file) to the workspace.
1199
+
1200
+ SKILL GUARD: Some file paths require reading a specific Harper skill before writing. If this tool returns status "skill_required", you MUST:
1201
+ 1. Read the skill content included in the response carefully.
1202
+ 2. Call apply_patch again with the EXACT same type, path, and diff arguments.
1203
+ Do NOT use shell, set_patch_auto_approve, set_shell_auto_approve, or any other workaround to bypass this requirement.`,
1169
1204
  parameters: ApplyPatchParameters,
1170
1205
  needsApproval,
1171
1206
  execute: execute11
@@ -2464,6 +2499,66 @@ function createTools() {
2464
2499
  ];
2465
2500
  }
2466
2501
 
2502
+ // utils/ollama/fetchOllamaModels.ts
2503
+ async function fetchOllamaModels() {
2504
+ const ollamaBaseUrl = process.env.OLLAMA_BASE_URL ? normalizeOllamaBaseUrl(process.env.OLLAMA_BASE_URL) : "http://localhost:11434/api";
2505
+ try {
2506
+ const response = await fetch(`${ollamaBaseUrl}/tags`);
2507
+ if (!response.ok) {
2508
+ return [];
2509
+ }
2510
+ const data = await response.json();
2511
+ return data.models.map((m) => m.name);
2512
+ } catch {
2513
+ return [];
2514
+ }
2515
+ }
2516
+
2517
+ // utils/ollama/pullOllamaModel.ts
2518
+ async function pullOllamaModel(modelName, onProgress) {
2519
+ const ollamaBaseUrl = process.env.OLLAMA_BASE_URL ? normalizeOllamaBaseUrl(process.env.OLLAMA_BASE_URL) : "http://localhost:11434/api";
2520
+ const response = await fetch(`${ollamaBaseUrl}/pull`, {
2521
+ method: "POST",
2522
+ body: JSON.stringify({ name: modelName, stream: true })
2523
+ });
2524
+ if (!response.ok) {
2525
+ throw new Error(`Failed to pull Ollama model ${modelName}: ${response.statusText}`);
2526
+ }
2527
+ if (!response.body) {
2528
+ throw new Error(`Failed to pull Ollama model ${modelName}: No response body`);
2529
+ }
2530
+ const reader = response.body.getReader();
2531
+ const decoder = new TextDecoder();
2532
+ while (true) {
2533
+ const { done, value } = await reader.read();
2534
+ if (done) {
2535
+ break;
2536
+ }
2537
+ const chunk = decoder.decode(value, { stream: true });
2538
+ const lines = chunk.split("\n");
2539
+ for (const line of lines) {
2540
+ if (!line.trim()) {
2541
+ continue;
2542
+ }
2543
+ try {
2544
+ const json = JSON.parse(line);
2545
+ if (onProgress) {
2546
+ onProgress(json);
2547
+ }
2548
+ } catch {
2549
+ }
2550
+ }
2551
+ }
2552
+ }
2553
+
2554
+ // utils/ollama/ensureOllamaModel.ts
2555
+ async function ensureOllamaModel(modelName, onProgress) {
2556
+ const models = await fetchOllamaModels();
2557
+ if (!models.includes(modelName)) {
2558
+ await pullOllamaModel(modelName, onProgress);
2559
+ }
2560
+ }
2561
+
2467
2562
  // utils/sessions/createSession.ts
2468
2563
  import { MemorySession as MemorySession3 } from "@openai/agents";
2469
2564
 
@@ -2843,6 +2938,18 @@ async function compactConversation(items) {
2843
2938
  let noticeContent = "... conversation history compacted ...";
2844
2939
  if (trackedState.compactionModel && itemsToCompact.length > 0) {
2845
2940
  try {
2941
+ if (getProvider(trackedState.compactionModel) === "Ollama") {
2942
+ const modelName = getModelName(trackedState.compactionModel);
2943
+ await ensureOllamaModel(modelName, (progress) => {
2944
+ emitToListeners("SetPulling", {
2945
+ modelName,
2946
+ status: progress.status,
2947
+ completed: progress.completed ?? 0,
2948
+ total: progress.total ?? 0
2949
+ });
2950
+ });
2951
+ emitToListeners("SetPulling", null);
2952
+ }
2846
2953
  const agent = new Agent({
2847
2954
  name: "History Compactor",
2848
2955
  model: isOpenAIModel(trackedState.compactionModel) ? trackedState.compactionModel : getModel(trackedState.compactionModel),
@@ -3724,7 +3831,7 @@ async function runAgentForOnePass(agent, session, input, controller, isPrompt) {
3724
3831
  }
3725
3832
 
3726
3833
  // agent/AgentManager.ts
3727
- var AgentManager = class {
3834
+ var AgentManager = class _AgentManager {
3728
3835
  isInitialized = false;
3729
3836
  controller = null;
3730
3837
  queuedUserInputs = [];
@@ -3732,17 +3839,20 @@ var AgentManager = class {
3732
3839
  agent = null;
3733
3840
  session = null;
3734
3841
  initialMessages = [];
3735
- async initialize() {
3736
- if (this.isInitialized) {
3737
- return;
3738
- }
3739
- this.agent = new Agent3({
3842
+ static instantiateAgent(tools) {
3843
+ return new Agent3({
3740
3844
  name: "Harper Agent",
3741
3845
  model: isOpenAIModel(trackedState.model) ? trackedState.model : getModel(trackedState.model),
3742
3846
  modelSettings: getModelSettings(trackedState.model),
3743
3847
  instructions: readAgentSkillsRoot() || defaultInstructions(),
3744
- tools: createTools()
3848
+ tools
3745
3849
  });
3850
+ }
3851
+ async initialize() {
3852
+ if (this.isInitialized) {
3853
+ return;
3854
+ }
3855
+ this.agent = _AgentManager.instantiateAgent(createTools());
3746
3856
  this.session = createSession(trackedState.sessionPath);
3747
3857
  try {
3748
3858
  const plan = await this.session?.getPlanState?.();
@@ -3871,6 +3981,23 @@ var AgentManager = class {
3871
3981
  async runTask(task, isPrompt) {
3872
3982
  this.controller = new AbortController();
3873
3983
  await this.runCompactionIfWeWereIdle();
3984
+ if (getProvider(trackedState.model) === "Ollama") {
3985
+ try {
3986
+ const modelName = getModelName(trackedState.model);
3987
+ await ensureOllamaModel(modelName, (progress) => {
3988
+ emitToListeners("SetPulling", {
3989
+ modelName,
3990
+ status: progress.status,
3991
+ completed: progress.completed ?? 0,
3992
+ total: progress.total ?? 0
3993
+ });
3994
+ });
3995
+ emitToListeners("SetPulling", null);
3996
+ } catch (err) {
3997
+ emitToListeners("SetPulling", null);
3998
+ logError(err);
3999
+ }
4000
+ }
3874
4001
  emitToListeners("SetThinking", true);
3875
4002
  let taskOrState = task;
3876
4003
  const lowerTask = task.toLowerCase();
@@ -3990,6 +4117,7 @@ var ChatProvider = ({
3990
4117
  const [userInputMode, setUserInputMode] = useState3("waiting");
3991
4118
  const [isThinking, setIsThinking] = useState3(false);
3992
4119
  const [isCompacting, setIsCompacting] = useState3(false);
4120
+ const [pullingState, setPullingState] = useState3(null);
3993
4121
  const [focusedArea, setFocusedArea] = useState3("input");
3994
4122
  useListener("PushNewMessages", (messages2) => {
3995
4123
  setMessages((prev) => {
@@ -4007,6 +4135,9 @@ var ChatProvider = ({
4007
4135
  useListener("SetCompacting", (value2) => {
4008
4136
  setIsCompacting(Boolean(value2));
4009
4137
  }, []);
4138
+ useListener("SetPulling", (value2) => {
4139
+ setPullingState(value2);
4140
+ }, []);
4010
4141
  useListener("InterruptThought", () => {
4011
4142
  setIsThinking(false);
4012
4143
  }, []);
@@ -4044,9 +4175,10 @@ var ChatProvider = ({
4044
4175
  userInputMode,
4045
4176
  isThinking,
4046
4177
  isCompacting,
4178
+ pullingState,
4047
4179
  focusedArea,
4048
4180
  setFocusedArea
4049
- }), [messages, userInputMode, isThinking, isCompacting, focusedArea]);
4181
+ }), [messages, userInputMode, isThinking, isCompacting, pullingState, focusedArea]);
4050
4182
  return /* @__PURE__ */ jsx3(ChatContext.Provider, { value, children });
4051
4183
  };
4052
4184
 
@@ -5330,7 +5462,7 @@ function calculatePlaceholder(mode) {
5330
5462
  // ink/components/ChatContent.tsx
5331
5463
  import { jsx as jsx17, jsxs as jsxs9 } from "react/jsx-runtime";
5332
5464
  function ChatContent() {
5333
- const { messages, isThinking, isCompacting, focusedArea, setFocusedArea } = useChat();
5465
+ const { messages, isThinking, isCompacting, pullingState, focusedArea, setFocusedArea } = useChat();
5334
5466
  const { payload } = useApproval();
5335
5467
  const size = useTerminalSize();
5336
5468
  useMessageListener();
@@ -5426,6 +5558,7 @@ function ChatContent() {
5426
5558
  return 6;
5427
5559
  }, []);
5428
5560
  const availableTextWidth = timelineWidth - 4;
5561
+ const pullingHeight = pullingState ? 1 : 0;
5429
5562
  const lineItems = useMemo11(() => {
5430
5563
  const acc = [];
5431
5564
  for (const msg of messages) {
@@ -5512,7 +5645,7 @@ function ChatContent() {
5512
5645
  ], []);
5513
5646
  const timelineTitle = "TIMELINE:";
5514
5647
  const timelineHeaderWidth = timelineWidth - 1;
5515
- const showSpinner = isCompacting || isThinking;
5648
+ const showSpinner = isCompacting || isThinking || Boolean(pullingState);
5516
5649
  const timelineDashes = timelineHeaderWidth - timelineTitle.length - (showSpinner ? 5 : 0);
5517
5650
  const tabsTotalWidth = tabs.reduce((acc, t) => acc + t.label.length + 2, 0) + (tabs.length - 1);
5518
5651
  const statusDashes = Math.max(0, statusWidth - tabsTotalWidth - 2);
@@ -5549,7 +5682,7 @@ function ChatContent() {
5549
5682
  "\u256E"
5550
5683
  ] })
5551
5684
  ] }),
5552
- /* @__PURE__ */ jsxs9(Box10, { flexDirection: "row", height: contentHeight - 2, children: [
5685
+ /* @__PURE__ */ jsxs9(Box10, { flexDirection: "row", height: contentHeight - 2 - pullingHeight, children: [
5553
5686
  /* @__PURE__ */ jsx17(
5554
5687
  Box10,
5555
5688
  {
@@ -5562,7 +5695,7 @@ function ChatContent() {
5562
5695
  {
5563
5696
  items: lineItems,
5564
5697
  itemHeight: 1,
5565
- height: contentHeight - 2,
5698
+ height: contentHeight - 2 - pullingHeight,
5566
5699
  selectedIndex,
5567
5700
  renderOverflowTop: useCallback5((count) => /* @__PURE__ */ jsxs9(Box10, { children: [
5568
5701
  /* @__PURE__ */ jsx17(Text10, { color: "gray", dimColor: true, children: "\u2502" }),
@@ -5630,17 +5763,36 @@ function ChatContent() {
5630
5763
  /* @__PURE__ */ jsx17(Box10, { marginTop: 1, children: /* @__PURE__ */ jsx17(SettingsView, { isDense: true }) })
5631
5764
  ] }),
5632
5765
  activeTab === "planDescription" && /* @__PURE__ */ jsx17(PlanView, {}),
5633
- activeTab === "actions" && /* @__PURE__ */ jsx17(ActionsView, { height: contentHeight - 2, isFocused: focusedArea === "status" })
5766
+ activeTab === "actions" && /* @__PURE__ */ jsx17(
5767
+ ActionsView,
5768
+ {
5769
+ height: contentHeight - 2 - pullingHeight,
5770
+ isFocused: focusedArea === "status"
5771
+ }
5772
+ )
5634
5773
  ] })
5635
5774
  }
5636
5775
  )
5637
5776
  ] }),
5638
- /* @__PURE__ */ jsxs9(Box10, { flexDirection: "row", height: 1, children: [
5639
- /* @__PURE__ */ jsx17(Text10, { color: junctionLeftColor, children: "\u2522" }),
5640
- /* @__PURE__ */ jsx17(Text10, { color: timelineBottomColor, children: "\u2501".repeat(timelineWidth - 1) }),
5641
- /* @__PURE__ */ jsx17(Text10, { color: junctionMiddleColor, children: "\u2537" }),
5642
- /* @__PURE__ */ jsx17(Text10, { color: statusBottomColor, children: "\u2501".repeat(Math.max(0, statusWidth - 2)) }),
5643
- /* @__PURE__ */ jsx17(Text10, { color: junctionRightColor, children: "\u252A" })
5777
+ /* @__PURE__ */ jsxs9(Box10, { flexDirection: "column", children: [
5778
+ pullingState && /* @__PURE__ */ jsx17(Box10, { paddingLeft: 2, paddingRight: 2, marginBottom: 0, children: /* @__PURE__ */ jsxs9(Text10, { color: "yellow", children: [
5779
+ `\uF019 Downloading `,
5780
+ /* @__PURE__ */ jsx17(Text10, { bold: true, children: pullingState.modelName }),
5781
+ ` from Ollama... `,
5782
+ /* @__PURE__ */ jsx17(Text10, { dimColor: true, children: pullingState.status === "pulling manifest" ? "initializing" : pullingState.status }),
5783
+ pullingState.total > 0 && /* @__PURE__ */ jsxs9(Text10, { children: [
5784
+ ` [${"=".repeat(Math.floor(pullingState.completed / pullingState.total * 20))}${" ".repeat(20 - Math.floor(pullingState.completed / pullingState.total * 20))}] `,
5785
+ Math.round(pullingState.completed / pullingState.total * 100),
5786
+ "%"
5787
+ ] })
5788
+ ] }) }),
5789
+ /* @__PURE__ */ jsxs9(Box10, { flexDirection: "row", height: 1, children: [
5790
+ /* @__PURE__ */ jsx17(Text10, { color: junctionLeftColor, children: "\u2522" }),
5791
+ /* @__PURE__ */ jsx17(Text10, { color: timelineBottomColor, children: "\u2501".repeat(timelineWidth - 1) }),
5792
+ /* @__PURE__ */ jsx17(Text10, { color: junctionMiddleColor, children: "\u2537" }),
5793
+ /* @__PURE__ */ jsx17(Text10, { color: statusBottomColor, children: "\u2501".repeat(Math.max(0, statusWidth - 2)) }),
5794
+ /* @__PURE__ */ jsx17(Text10, { color: junctionRightColor, children: "\u252A" })
5795
+ ] })
5644
5796
  ] }),
5645
5797
  /* @__PURE__ */ jsx17(UserInput, {})
5646
5798
  ] });
@@ -5845,7 +5997,7 @@ function DiffApprovalView() {
5845
5997
  // ink/configurationWizard/ConfigurationWizard.tsx
5846
5998
  import { Box as Box18, useInput as useInput10 } from "ink";
5847
5999
  import { Step, Stepper } from "ink-stepper";
5848
- import { useState as useState17 } from "react";
6000
+ import { useEffect as useEffect14, useState as useState17 } from "react";
5849
6001
 
5850
6002
  // utils/files/getEnvVarForProvider.ts
5851
6003
  function getEnvVarForProvider(provider) {
@@ -6040,7 +6192,7 @@ var modelsByProvider = {
6040
6192
  OpenAI: [defaultOpenAIModel, "gpt-5.0", defaultOpenAICompactionModel],
6041
6193
  Anthropic: [defaultAnthropicModel, "claude-4-5-sonnet-latest", defaultAnthropicCompactionModel],
6042
6194
  Google: [defaultGoogleModel, "gemini-3-flash", "gemini-2.5-flash", defaultGoogleCompactionModel],
6043
- Ollama: [defaultOllamaModel, "mistral", defaultOllamaCompactionModel]
6195
+ Ollama: [defaultOllamaModel, "ollama-qwen3.5:27b", defaultOllamaCompactionModel]
6044
6196
  };
6045
6197
  var compactorModelsByProvider = {
6046
6198
  OpenAI: modelsByProvider.OpenAI.slice().reverse(),
@@ -6194,13 +6346,23 @@ function StepperProgress({ steps, currentStep }) {
6194
6346
  import { jsx as jsx25, jsxs as jsxs17 } from "react/jsx-runtime";
6195
6347
  function ConfigurationWizard({ onComplete }) {
6196
6348
  const [provider, setProvider] = useState17("OpenAI");
6349
+ const [ollamaModels, setOllamaModels] = useState17([]);
6350
+ useEffect14(() => {
6351
+ if (provider === "Ollama") {
6352
+ fetchOllamaModels().then((models2) => {
6353
+ if (models2.length > 0) {
6354
+ setOllamaModels(models2);
6355
+ }
6356
+ });
6357
+ }
6358
+ }, [provider]);
6197
6359
  useInput10((input, key) => {
6198
6360
  if (key.ctrl && input === "x") {
6199
6361
  emitToListeners("ExitUI", void 0);
6200
6362
  }
6201
6363
  });
6202
- const models = modelsByProvider[provider];
6203
- const compactorModels = compactorModelsByProvider[provider];
6364
+ const models = provider === "Ollama" && ollamaModels.length > 0 ? [.../* @__PURE__ */ new Set([...ollamaModels, ...modelsByProvider[provider]])] : modelsByProvider[provider];
6365
+ const compactorModels = provider === "Ollama" && ollamaModels.length > 0 ? [.../* @__PURE__ */ new Set([...ollamaModels, ...compactorModelsByProvider[provider]])] : compactorModelsByProvider[provider];
6204
6366
  return /* @__PURE__ */ jsx25(Box18, { flexDirection: "column", padding: 1, minHeight: 10, children: /* @__PURE__ */ jsxs17(
6205
6367
  Stepper,
6206
6368
  {
@@ -6245,7 +6407,8 @@ function ConfigurationWizard({ onComplete }) {
6245
6407
  title: "What model would you like to use?",
6246
6408
  models,
6247
6409
  onConfirm: (m) => {
6248
- updateEnv("HARPER_AGENT_MODEL", m);
6410
+ const finalModelName = provider === "Ollama" && !m.startsWith("ollama-") && !m.includes(":") ? `ollama-${m}` : m;
6411
+ updateEnv("HARPER_AGENT_MODEL", finalModelName);
6249
6412
  goNext();
6250
6413
  },
6251
6414
  onBack: goBack
@@ -6257,7 +6420,8 @@ function ConfigurationWizard({ onComplete }) {
6257
6420
  title: "What model should we use for memory compaction?",
6258
6421
  models: compactorModels,
6259
6422
  onConfirm: (m) => {
6260
- updateEnv("HARPER_AGENT_COMPACTION_MODEL", m);
6423
+ const finalModelName = provider === "Ollama" && !m.startsWith("ollama-") && !m.includes(":") ? `ollama-${m}` : m;
6424
+ updateEnv("HARPER_AGENT_COMPACTION_MODEL", finalModelName);
6261
6425
  goNext();
6262
6426
  },
6263
6427
  onBack: goBack
@@ -6709,7 +6873,7 @@ function ensureApiKey() {
6709
6873
  requiredEnvVars.add("ANTHROPIC_API_KEY");
6710
6874
  } else if (model.startsWith("gemini-")) {
6711
6875
  requiredEnvVars.add("GOOGLE_GENERATIVE_AI_API_KEY");
6712
- } else if (model.startsWith("ollama-")) {
6876
+ } else if (model.startsWith("ollama-") || model.includes(":")) {
6713
6877
  } else {
6714
6878
  requiredEnvVars.add("OPENAI_API_KEY");
6715
6879
  }
package/package.json CHANGED
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "name": "@harperfast/agent",
3
3
  "description": "AI to help you with Harper app management",
4
- "version": "0.15.10",
4
+ "version": "0.16.1",
5
5
  "main": "dist/agent.js",
6
6
  "repository": "github:HarperFast/harper-agent",
7
7
  "bugs": {