@harperfast/agent 0.15.9 → 0.16.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/dist/agent.js +186 -50
  2. package/package.json +3 -3
package/dist/agent.js CHANGED
@@ -285,8 +285,7 @@ function defaultInstructions() {
285
285
  import { anthropic } from "@ai-sdk/anthropic";
286
286
  import { google } from "@ai-sdk/google";
287
287
  import { openai } from "@ai-sdk/openai";
288
- import { aisdk } from "@openai/agents-extensions";
289
- import "@openai/agents-extensions/ai-sdk";
288
+ import { aisdk } from "@openai/agents-extensions/ai-sdk";
290
289
  import { createOllama, ollama } from "ollama-ai-provider-v2";
291
290
 
292
291
  // agent/defaults.ts
@@ -297,8 +296,8 @@ var defaultAnthropicModel = "claude-4-6-opus-latest";
297
296
  var defaultAnthropicCompactionModel = "claude-4-5-haiku-latest";
298
297
  var defaultGoogleModel = "gemini-3-pro";
299
298
  var defaultGoogleCompactionModel = "gemini-2.5-flash-lite";
300
- var defaultOllamaModel = "ollama-qwen3-coder:30b";
301
- var defaultOllamaCompactionModel = "ollama-qwen2.5-coder";
299
+ var defaultOllamaModel = "ollama-qwen3.5";
300
+ var defaultOllamaCompactionModel = "ollama-qwen3.5:2b";
302
301
  var defaultModels = [
303
302
  defaultOpenAIModel,
304
303
  defaultAnthropicModel,
@@ -312,12 +311,45 @@ var defaultCompactionModels = [
312
311
  defaultOllamaCompactionModel
313
312
  ];
314
313
 
314
+ // utils/ollama/normalizeOllamaBaseUrl.ts
315
+ function normalizeOllamaBaseUrl(baseUrl) {
316
+ let url = baseUrl.trim();
317
+ if (!url.startsWith("http://") && !url.startsWith("https://")) {
318
+ url = `http://${url}`;
319
+ }
320
+ const urlObj = new URL(url);
321
+ if (!urlObj.port) {
322
+ urlObj.port = "11434";
323
+ }
324
+ let pathname = urlObj.pathname;
325
+ if (pathname.endsWith("/")) {
326
+ pathname = pathname.slice(0, -1);
327
+ }
328
+ if (!pathname.endsWith("/api")) {
329
+ pathname += "/api";
330
+ }
331
+ urlObj.pathname = pathname;
332
+ return urlObj.toString().replace(/\/$/, "");
333
+ }
334
+
315
335
  // lifecycle/getModel.ts
316
336
  function isOpenAIModel(modelName) {
317
337
  if (!modelName || modelName === defaultOpenAIModel) {
318
338
  return true;
319
339
  }
320
- return !modelName.startsWith("claude-") && !modelName.startsWith("gemini-") && !modelName.startsWith("ollama-");
340
+ return !modelName.startsWith("claude-") && !modelName.startsWith("gemini-") && !modelName.startsWith("ollama-") && !modelName.includes(":");
341
+ }
342
+ function getProvider(modelName) {
343
+ if (modelName.startsWith("claude-")) {
344
+ return "Anthropic";
345
+ }
346
+ if (modelName.startsWith("gemini-")) {
347
+ return "Google";
348
+ }
349
+ if (modelName.startsWith("ollama-") || modelName.includes(":")) {
350
+ return "Ollama";
351
+ }
352
+ return "OpenAI";
321
353
  }
322
354
  function getModel(modelName) {
323
355
  if (modelName.startsWith("claude-")) {
@@ -326,9 +358,9 @@ function getModel(modelName) {
326
358
  if (modelName.startsWith("gemini-")) {
327
359
  return aisdk(google(modelName));
328
360
  }
329
- if (modelName.startsWith("ollama-")) {
361
+ if (modelName.startsWith("ollama-") || modelName.includes(":")) {
330
362
  const ollamaBaseUrl = process.env.OLLAMA_BASE_URL ? normalizeOllamaBaseUrl(process.env.OLLAMA_BASE_URL) : void 0;
331
- const ollamaProvider = ollamaBaseUrl ? createOllama({ baseURL: ollamaBaseUrl }) : ollama;
363
+ const ollamaProvider = ollamaBaseUrl ? createOllama({ baseURL: ollamaBaseUrl, compatibility: "strict" }) : ollama;
332
364
  return aisdk(ollamaProvider(getModelName(modelName)));
333
365
  }
334
366
  return aisdk(openai(modelName));
@@ -341,29 +373,10 @@ function getModelName(modelName) {
341
373
  return modelName;
342
374
  }
343
375
  if (modelName.startsWith("ollama-")) {
344
- return modelName.replace("ollama-", "");
376
+ return modelName.slice(7);
345
377
  }
346
378
  return modelName;
347
379
  }
348
- function normalizeOllamaBaseUrl(baseUrl) {
349
- let url = baseUrl.trim();
350
- if (!url.startsWith("http://") && !url.startsWith("https://")) {
351
- url = `http://${url}`;
352
- }
353
- const urlObj = new URL(url);
354
- if (!urlObj.port) {
355
- urlObj.port = "11434";
356
- }
357
- let pathname = urlObj.pathname;
358
- if (pathname.endsWith("/")) {
359
- pathname = pathname.slice(0, -1);
360
- }
361
- if (!pathname.endsWith("/api")) {
362
- pathname += "/api";
363
- }
364
- urlObj.pathname = pathname;
365
- return urlObj.toString().replace(/\/$/, "");
366
- }
367
380
 
368
381
  // lifecycle/handleExit.ts
369
382
  import { getGlobalTraceProvider } from "@openai/agents";
@@ -2465,6 +2478,66 @@ function createTools() {
2465
2478
  ];
2466
2479
  }
2467
2480
 
2481
+ // utils/ollama/fetchOllamaModels.ts
2482
+ async function fetchOllamaModels() {
2483
+ const ollamaBaseUrl = process.env.OLLAMA_BASE_URL ? normalizeOllamaBaseUrl(process.env.OLLAMA_BASE_URL) : "http://localhost:11434/api";
2484
+ try {
2485
+ const response = await fetch(`${ollamaBaseUrl}/tags`);
2486
+ if (!response.ok) {
2487
+ return [];
2488
+ }
2489
+ const data = await response.json();
2490
+ return data.models.map((m) => m.name);
2491
+ } catch {
2492
+ return [];
2493
+ }
2494
+ }
2495
+
2496
+ // utils/ollama/pullOllamaModel.ts
2497
+ async function pullOllamaModel(modelName, onProgress) {
2498
+ const ollamaBaseUrl = process.env.OLLAMA_BASE_URL ? normalizeOllamaBaseUrl(process.env.OLLAMA_BASE_URL) : "http://localhost:11434/api";
2499
+ const response = await fetch(`${ollamaBaseUrl}/pull`, {
2500
+ method: "POST",
2501
+ body: JSON.stringify({ name: modelName, stream: true })
2502
+ });
2503
+ if (!response.ok) {
2504
+ throw new Error(`Failed to pull Ollama model ${modelName}: ${response.statusText}`);
2505
+ }
2506
+ if (!response.body) {
2507
+ throw new Error(`Failed to pull Ollama model ${modelName}: No response body`);
2508
+ }
2509
+ const reader = response.body.getReader();
2510
+ const decoder = new TextDecoder();
2511
+ while (true) {
2512
+ const { done, value } = await reader.read();
2513
+ if (done) {
2514
+ break;
2515
+ }
2516
+ const chunk = decoder.decode(value, { stream: true });
2517
+ const lines = chunk.split("\n");
2518
+ for (const line of lines) {
2519
+ if (!line.trim()) {
2520
+ continue;
2521
+ }
2522
+ try {
2523
+ const json = JSON.parse(line);
2524
+ if (onProgress) {
2525
+ onProgress(json);
2526
+ }
2527
+ } catch {
2528
+ }
2529
+ }
2530
+ }
2531
+ }
2532
+
2533
+ // utils/ollama/ensureOllamaModel.ts
2534
+ async function ensureOllamaModel(modelName, onProgress) {
2535
+ const models = await fetchOllamaModels();
2536
+ if (!models.includes(modelName)) {
2537
+ await pullOllamaModel(modelName, onProgress);
2538
+ }
2539
+ }
2540
+
2468
2541
  // utils/sessions/createSession.ts
2469
2542
  import { MemorySession as MemorySession3 } from "@openai/agents";
2470
2543
 
@@ -2844,6 +2917,18 @@ async function compactConversation(items) {
2844
2917
  let noticeContent = "... conversation history compacted ...";
2845
2918
  if (trackedState.compactionModel && itemsToCompact.length > 0) {
2846
2919
  try {
2920
+ if (getProvider(trackedState.compactionModel) === "Ollama") {
2921
+ const modelName = getModelName(trackedState.compactionModel);
2922
+ await ensureOllamaModel(modelName, (progress) => {
2923
+ emitToListeners("SetPulling", {
2924
+ modelName,
2925
+ status: progress.status,
2926
+ completed: progress.completed ?? 0,
2927
+ total: progress.total ?? 0
2928
+ });
2929
+ });
2930
+ emitToListeners("SetPulling", null);
2931
+ }
2847
2932
  const agent = new Agent({
2848
2933
  name: "History Compactor",
2849
2934
  model: isOpenAIModel(trackedState.compactionModel) ? trackedState.compactionModel : getModel(trackedState.compactionModel),
@@ -3872,6 +3957,23 @@ var AgentManager = class {
3872
3957
  async runTask(task, isPrompt) {
3873
3958
  this.controller = new AbortController();
3874
3959
  await this.runCompactionIfWeWereIdle();
3960
+ if (getProvider(trackedState.model) === "Ollama") {
3961
+ try {
3962
+ const modelName = getModelName(trackedState.model);
3963
+ await ensureOllamaModel(modelName, (progress) => {
3964
+ emitToListeners("SetPulling", {
3965
+ modelName,
3966
+ status: progress.status,
3967
+ completed: progress.completed ?? 0,
3968
+ total: progress.total ?? 0
3969
+ });
3970
+ });
3971
+ emitToListeners("SetPulling", null);
3972
+ } catch (err) {
3973
+ emitToListeners("SetPulling", null);
3974
+ logError(err);
3975
+ }
3976
+ }
3875
3977
  emitToListeners("SetThinking", true);
3876
3978
  let taskOrState = task;
3877
3979
  const lowerTask = task.toLowerCase();
@@ -3991,6 +4093,7 @@ var ChatProvider = ({
3991
4093
  const [userInputMode, setUserInputMode] = useState3("waiting");
3992
4094
  const [isThinking, setIsThinking] = useState3(false);
3993
4095
  const [isCompacting, setIsCompacting] = useState3(false);
4096
+ const [pullingState, setPullingState] = useState3(null);
3994
4097
  const [focusedArea, setFocusedArea] = useState3("input");
3995
4098
  useListener("PushNewMessages", (messages2) => {
3996
4099
  setMessages((prev) => {
@@ -4008,6 +4111,9 @@ var ChatProvider = ({
4008
4111
  useListener("SetCompacting", (value2) => {
4009
4112
  setIsCompacting(Boolean(value2));
4010
4113
  }, []);
4114
+ useListener("SetPulling", (value2) => {
4115
+ setPullingState(value2);
4116
+ }, []);
4011
4117
  useListener("InterruptThought", () => {
4012
4118
  setIsThinking(false);
4013
4119
  }, []);
@@ -4045,9 +4151,10 @@ var ChatProvider = ({
4045
4151
  userInputMode,
4046
4152
  isThinking,
4047
4153
  isCompacting,
4154
+ pullingState,
4048
4155
  focusedArea,
4049
4156
  setFocusedArea
4050
- }), [messages, userInputMode, isThinking, isCompacting, focusedArea]);
4157
+ }), [messages, userInputMode, isThinking, isCompacting, pullingState, focusedArea]);
4051
4158
  return /* @__PURE__ */ jsx3(ChatContext.Provider, { value, children });
4052
4159
  };
4053
4160
 
@@ -4926,10 +5033,7 @@ function SettingsView({ isDense = false }) {
4926
5033
  {
4927
5034
  label: "<edit settings>",
4928
5035
  isAction: true,
4929
- action: () => {
4930
- bootstrapConfig(() => {
4931
- });
4932
- }
5036
+ action: bootstrapConfig
4933
5037
  }
4934
5038
  ], [autoApproveCodeInterpreter, autoApprovePatches, autoApproveShell, monitorRateLimits]);
4935
5039
  useInput2((_input, key) => {
@@ -5334,7 +5438,7 @@ function calculatePlaceholder(mode) {
5334
5438
  // ink/components/ChatContent.tsx
5335
5439
  import { jsx as jsx17, jsxs as jsxs9 } from "react/jsx-runtime";
5336
5440
  function ChatContent() {
5337
- const { messages, isThinking, isCompacting, focusedArea, setFocusedArea } = useChat();
5441
+ const { messages, isThinking, isCompacting, pullingState, focusedArea, setFocusedArea } = useChat();
5338
5442
  const { payload } = useApproval();
5339
5443
  const size = useTerminalSize();
5340
5444
  useMessageListener();
@@ -5430,6 +5534,7 @@ function ChatContent() {
5430
5534
  return 6;
5431
5535
  }, []);
5432
5536
  const availableTextWidth = timelineWidth - 4;
5537
+ const pullingHeight = pullingState ? 1 : 0;
5433
5538
  const lineItems = useMemo11(() => {
5434
5539
  const acc = [];
5435
5540
  for (const msg of messages) {
@@ -5516,7 +5621,7 @@ function ChatContent() {
5516
5621
  ], []);
5517
5622
  const timelineTitle = "TIMELINE:";
5518
5623
  const timelineHeaderWidth = timelineWidth - 1;
5519
- const showSpinner = isCompacting || isThinking;
5624
+ const showSpinner = isCompacting || isThinking || Boolean(pullingState);
5520
5625
  const timelineDashes = timelineHeaderWidth - timelineTitle.length - (showSpinner ? 5 : 0);
5521
5626
  const tabsTotalWidth = tabs.reduce((acc, t) => acc + t.label.length + 2, 0) + (tabs.length - 1);
5522
5627
  const statusDashes = Math.max(0, statusWidth - tabsTotalWidth - 2);
@@ -5553,7 +5658,7 @@ function ChatContent() {
5553
5658
  "\u256E"
5554
5659
  ] })
5555
5660
  ] }),
5556
- /* @__PURE__ */ jsxs9(Box10, { flexDirection: "row", height: contentHeight - 2, children: [
5661
+ /* @__PURE__ */ jsxs9(Box10, { flexDirection: "row", height: contentHeight - 2 - pullingHeight, children: [
5557
5662
  /* @__PURE__ */ jsx17(
5558
5663
  Box10,
5559
5664
  {
@@ -5566,7 +5671,7 @@ function ChatContent() {
5566
5671
  {
5567
5672
  items: lineItems,
5568
5673
  itemHeight: 1,
5569
- height: contentHeight - 2,
5674
+ height: contentHeight - 2 - pullingHeight,
5570
5675
  selectedIndex,
5571
5676
  renderOverflowTop: useCallback5((count) => /* @__PURE__ */ jsxs9(Box10, { children: [
5572
5677
  /* @__PURE__ */ jsx17(Text10, { color: "gray", dimColor: true, children: "\u2502" }),
@@ -5634,17 +5739,36 @@ function ChatContent() {
5634
5739
  /* @__PURE__ */ jsx17(Box10, { marginTop: 1, children: /* @__PURE__ */ jsx17(SettingsView, { isDense: true }) })
5635
5740
  ] }),
5636
5741
  activeTab === "planDescription" && /* @__PURE__ */ jsx17(PlanView, {}),
5637
- activeTab === "actions" && /* @__PURE__ */ jsx17(ActionsView, { height: contentHeight - 2, isFocused: focusedArea === "status" })
5742
+ activeTab === "actions" && /* @__PURE__ */ jsx17(
5743
+ ActionsView,
5744
+ {
5745
+ height: contentHeight - 2 - pullingHeight,
5746
+ isFocused: focusedArea === "status"
5747
+ }
5748
+ )
5638
5749
  ] })
5639
5750
  }
5640
5751
  )
5641
5752
  ] }),
5642
- /* @__PURE__ */ jsxs9(Box10, { flexDirection: "row", height: 1, children: [
5643
- /* @__PURE__ */ jsx17(Text10, { color: junctionLeftColor, children: "\u2522" }),
5644
- /* @__PURE__ */ jsx17(Text10, { color: timelineBottomColor, children: "\u2501".repeat(timelineWidth - 1) }),
5645
- /* @__PURE__ */ jsx17(Text10, { color: junctionMiddleColor, children: "\u2537" }),
5646
- /* @__PURE__ */ jsx17(Text10, { color: statusBottomColor, children: "\u2501".repeat(Math.max(0, statusWidth - 2)) }),
5647
- /* @__PURE__ */ jsx17(Text10, { color: junctionRightColor, children: "\u252A" })
5753
+ /* @__PURE__ */ jsxs9(Box10, { flexDirection: "column", children: [
5754
+ pullingState && /* @__PURE__ */ jsx17(Box10, { paddingLeft: 2, paddingRight: 2, marginBottom: 0, children: /* @__PURE__ */ jsxs9(Text10, { color: "yellow", children: [
5755
+ `\uF019 Downloading `,
5756
+ /* @__PURE__ */ jsx17(Text10, { bold: true, children: pullingState.modelName }),
5757
+ ` from Ollama... `,
5758
+ /* @__PURE__ */ jsx17(Text10, { dimColor: true, children: pullingState.status === "pulling manifest" ? "initializing" : pullingState.status }),
5759
+ pullingState.total > 0 && /* @__PURE__ */ jsxs9(Text10, { children: [
5760
+ ` [${"=".repeat(Math.floor(pullingState.completed / pullingState.total * 20))}${" ".repeat(20 - Math.floor(pullingState.completed / pullingState.total * 20))}] `,
5761
+ Math.round(pullingState.completed / pullingState.total * 100),
5762
+ "%"
5763
+ ] })
5764
+ ] }) }),
5765
+ /* @__PURE__ */ jsxs9(Box10, { flexDirection: "row", height: 1, children: [
5766
+ /* @__PURE__ */ jsx17(Text10, { color: junctionLeftColor, children: "\u2522" }),
5767
+ /* @__PURE__ */ jsx17(Text10, { color: timelineBottomColor, children: "\u2501".repeat(timelineWidth - 1) }),
5768
+ /* @__PURE__ */ jsx17(Text10, { color: junctionMiddleColor, children: "\u2537" }),
5769
+ /* @__PURE__ */ jsx17(Text10, { color: statusBottomColor, children: "\u2501".repeat(Math.max(0, statusWidth - 2)) }),
5770
+ /* @__PURE__ */ jsx17(Text10, { color: junctionRightColor, children: "\u252A" })
5771
+ ] })
5648
5772
  ] }),
5649
5773
  /* @__PURE__ */ jsx17(UserInput, {})
5650
5774
  ] });
@@ -5849,7 +5973,7 @@ function DiffApprovalView() {
5849
5973
  // ink/configurationWizard/ConfigurationWizard.tsx
5850
5974
  import { Box as Box18, useInput as useInput10 } from "ink";
5851
5975
  import { Step, Stepper } from "ink-stepper";
5852
- import { useState as useState17 } from "react";
5976
+ import { useEffect as useEffect14, useState as useState17 } from "react";
5853
5977
 
5854
5978
  // utils/files/getEnvVarForProvider.ts
5855
5979
  function getEnvVarForProvider(provider) {
@@ -6044,7 +6168,7 @@ var modelsByProvider = {
6044
6168
  OpenAI: [defaultOpenAIModel, "gpt-5.0", defaultOpenAICompactionModel],
6045
6169
  Anthropic: [defaultAnthropicModel, "claude-4-5-sonnet-latest", defaultAnthropicCompactionModel],
6046
6170
  Google: [defaultGoogleModel, "gemini-3-flash", "gemini-2.5-flash", defaultGoogleCompactionModel],
6047
- Ollama: [defaultOllamaModel, "mistral", defaultOllamaCompactionModel]
6171
+ Ollama: [defaultOllamaModel, "ollama-qwen3.5:27b", defaultOllamaCompactionModel]
6048
6172
  };
6049
6173
  var compactorModelsByProvider = {
6050
6174
  OpenAI: modelsByProvider.OpenAI.slice().reverse(),
@@ -6198,13 +6322,23 @@ function StepperProgress({ steps, currentStep }) {
6198
6322
  import { jsx as jsx25, jsxs as jsxs17 } from "react/jsx-runtime";
6199
6323
  function ConfigurationWizard({ onComplete }) {
6200
6324
  const [provider, setProvider] = useState17("OpenAI");
6325
+ const [ollamaModels, setOllamaModels] = useState17([]);
6326
+ useEffect14(() => {
6327
+ if (provider === "Ollama") {
6328
+ fetchOllamaModels().then((models2) => {
6329
+ if (models2.length > 0) {
6330
+ setOllamaModels(models2);
6331
+ }
6332
+ });
6333
+ }
6334
+ }, [provider]);
6201
6335
  useInput10((input, key) => {
6202
6336
  if (key.ctrl && input === "x") {
6203
6337
  emitToListeners("ExitUI", void 0);
6204
6338
  }
6205
6339
  });
6206
- const models = modelsByProvider[provider];
6207
- const compactorModels = compactorModelsByProvider[provider];
6340
+ const models = provider === "Ollama" && ollamaModels.length > 0 ? [.../* @__PURE__ */ new Set([...ollamaModels, ...modelsByProvider[provider]])] : modelsByProvider[provider];
6341
+ const compactorModels = provider === "Ollama" && ollamaModels.length > 0 ? [.../* @__PURE__ */ new Set([...ollamaModels, ...compactorModelsByProvider[provider]])] : compactorModelsByProvider[provider];
6208
6342
  return /* @__PURE__ */ jsx25(Box18, { flexDirection: "column", padding: 1, minHeight: 10, children: /* @__PURE__ */ jsxs17(
6209
6343
  Stepper,
6210
6344
  {
@@ -6249,7 +6383,8 @@ function ConfigurationWizard({ onComplete }) {
6249
6383
  title: "What model would you like to use?",
6250
6384
  models,
6251
6385
  onConfirm: (m) => {
6252
- updateEnv("HARPER_AGENT_MODEL", m);
6386
+ const finalModelName = provider === "Ollama" && !m.startsWith("ollama-") && !m.includes(":") ? `ollama-${m}` : m;
6387
+ updateEnv("HARPER_AGENT_MODEL", finalModelName);
6253
6388
  goNext();
6254
6389
  },
6255
6390
  onBack: goBack
@@ -6261,7 +6396,8 @@ function ConfigurationWizard({ onComplete }) {
6261
6396
  title: "What model should we use for memory compaction?",
6262
6397
  models: compactorModels,
6263
6398
  onConfirm: (m) => {
6264
- updateEnv("HARPER_AGENT_COMPACTION_MODEL", m);
6399
+ const finalModelName = provider === "Ollama" && !m.startsWith("ollama-") && !m.includes(":") ? `ollama-${m}` : m;
6400
+ updateEnv("HARPER_AGENT_COMPACTION_MODEL", finalModelName);
6265
6401
  goNext();
6266
6402
  },
6267
6403
  onBack: goBack
@@ -6713,7 +6849,7 @@ function ensureApiKey() {
6713
6849
  requiredEnvVars.add("ANTHROPIC_API_KEY");
6714
6850
  } else if (model.startsWith("gemini-")) {
6715
6851
  requiredEnvVars.add("GOOGLE_GENERATIVE_AI_API_KEY");
6716
- } else if (model.startsWith("ollama-")) {
6852
+ } else if (model.startsWith("ollama-") || model.includes(":")) {
6717
6853
  } else {
6718
6854
  requiredEnvVars.add("OPENAI_API_KEY");
6719
6855
  }
package/package.json CHANGED
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "name": "@harperfast/agent",
3
3
  "description": "AI to help you with Harper app management",
4
- "version": "0.15.9",
4
+ "version": "0.16.0",
5
5
  "main": "dist/agent.js",
6
6
  "repository": "github:HarperFast/harper-agent",
7
7
  "bugs": {
@@ -48,8 +48,8 @@
48
48
  "@ai-sdk/openai": "^3.0.26",
49
49
  "@harperfast/skills": "^1.0.0",
50
50
  "@inkjs/ui": "^2.0.0",
51
- "@openai/agents": "^0.5.0",
52
- "@openai/agents-extensions": "^0.5.0",
51
+ "@openai/agents": "^0.7.0",
52
+ "@openai/agents-extensions": "^0.7.0",
53
53
  "ai": "^6.0.79",
54
54
  "chalk": "^5.6.2",
55
55
  "cross-spawn": "^7.0.6",