@oh-my-pi/pi-coding-agent 13.9.2 → 13.9.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (53)
  1. package/CHANGELOG.md +64 -0
  2. package/examples/sdk/02-custom-model.ts +2 -1
  3. package/package.json +7 -7
  4. package/src/cli/args.ts +10 -6
  5. package/src/cli/list-models.ts +2 -2
  6. package/src/commands/launch.ts +3 -3
  7. package/src/config/model-registry.ts +136 -38
  8. package/src/config/model-resolver.ts +47 -21
  9. package/src/config/settings-schema.ts +56 -2
  10. package/src/discovery/helpers.ts +3 -3
  11. package/src/extensibility/custom-tools/types.ts +2 -0
  12. package/src/extensibility/extensions/loader.ts +3 -2
  13. package/src/extensibility/extensions/types.ts +10 -7
  14. package/src/extensibility/hooks/types.ts +2 -0
  15. package/src/main.ts +5 -22
  16. package/src/memories/index.ts +7 -3
  17. package/src/modes/components/footer.ts +10 -8
  18. package/src/modes/components/model-selector.ts +33 -38
  19. package/src/modes/components/settings-defs.ts +32 -3
  20. package/src/modes/components/settings-selector.ts +16 -5
  21. package/src/modes/components/status-line/context-thresholds.ts +68 -0
  22. package/src/modes/components/status-line/segments.ts +11 -12
  23. package/src/modes/components/status-line.ts +2 -6
  24. package/src/modes/components/thinking-selector.ts +7 -7
  25. package/src/modes/components/tree-selector.ts +3 -2
  26. package/src/modes/controllers/command-controller.ts +11 -26
  27. package/src/modes/controllers/event-controller.ts +16 -3
  28. package/src/modes/controllers/input-controller.ts +4 -2
  29. package/src/modes/controllers/selector-controller.ts +5 -4
  30. package/src/modes/interactive-mode.ts +2 -2
  31. package/src/modes/rpc/rpc-client.ts +5 -10
  32. package/src/modes/rpc/rpc-types.ts +5 -5
  33. package/src/modes/theme/theme.ts +8 -3
  34. package/src/priority.json +1 -0
  35. package/src/prompts/system/auto-handoff-threshold-focus.md +1 -0
  36. package/src/prompts/system/system-prompt.md +18 -2
  37. package/src/prompts/tools/hashline.md +139 -83
  38. package/src/sdk.ts +24 -16
  39. package/src/session/agent-session.ts +261 -118
  40. package/src/session/agent-storage.ts +14 -14
  41. package/src/session/compaction/compaction.ts +500 -13
  42. package/src/session/messages.ts +12 -1
  43. package/src/session/session-manager.ts +77 -19
  44. package/src/slash-commands/builtin-registry.ts +48 -0
  45. package/src/task/agents.ts +3 -2
  46. package/src/task/executor.ts +2 -2
  47. package/src/task/types.ts +2 -1
  48. package/src/thinking.ts +87 -0
  49. package/src/tools/browser.ts +15 -6
  50. package/src/tools/fetch.ts +118 -100
  51. package/src/tools/index.ts +2 -1
  52. package/src/web/kagi.ts +62 -7
  53. package/src/web/search/providers/exa.ts +74 -3
@@ -5,7 +5,14 @@
5
5
  * and provides a transformer to convert them to LLM-compatible messages.
6
6
  */
7
7
  import type { AgentMessage } from "@oh-my-pi/pi-agent-core";
8
- import type { ImageContent, Message, MessageAttribution, TextContent, ToolResultMessage } from "@oh-my-pi/pi-ai";
8
+ import type {
9
+ ImageContent,
10
+ Message,
11
+ MessageAttribution,
12
+ ProviderPayload,
13
+ TextContent,
14
+ ToolResultMessage,
15
+ } from "@oh-my-pi/pi-ai";
9
16
  import { renderPromptTemplate } from "../config/prompt-templates";
10
17
  import branchSummaryContextPrompt from "../prompts/compaction/branch-summary-context.md" with { type: "text" };
11
18
  import compactionSummaryContextPrompt from "../prompts/compaction/compaction-summary-context.md" with { type: "text" };
@@ -106,6 +113,7 @@ export interface CompactionSummaryMessage {
106
113
  summary: string;
107
114
  shortSummary?: string;
108
115
  tokensBefore: number;
116
+ providerPayload?: ProviderPayload;
109
117
  timestamp: number;
110
118
  }
111
119
 
@@ -193,12 +201,14 @@ export function createCompactionSummaryMessage(
193
201
  tokensBefore: number,
194
202
  timestamp: string,
195
203
  shortSummary?: string,
204
+ providerPayload?: ProviderPayload,
196
205
  ): CompactionSummaryMessage {
197
206
  return {
198
207
  role: "compactionSummary",
199
208
  summary,
200
209
  shortSummary,
201
210
  tokensBefore,
211
+ providerPayload,
202
212
  timestamp: new Date(timestamp).getTime(),
203
213
  };
204
214
  }
@@ -289,6 +299,7 @@ export function convertToLlm(messages: AgentMessage[]): Message[] {
289
299
  },
290
300
  ],
291
301
  attribution: "agent",
302
+ providerPayload: m.providerPayload,
292
303
  timestamp: m.timestamp,
293
304
  };
294
305
  case "fileMention": {
@@ -2,7 +2,15 @@ import * as fs from "node:fs";
2
2
  import * as os from "node:os";
3
3
  import * as path from "node:path";
4
4
  import type { AgentMessage } from "@oh-my-pi/pi-agent-core";
5
- import type { ImageContent, Message, MessageAttribution, TextContent, Usage } from "@oh-my-pi/pi-ai";
5
+ import type {
6
+ ImageContent,
7
+ Message,
8
+ MessageAttribution,
9
+ ProviderPayload,
10
+ ServiceTier,
11
+ TextContent,
12
+ Usage,
13
+ } from "@oh-my-pi/pi-ai";
6
14
  import { getTerminalId } from "@oh-my-pi/pi-tui";
7
15
  import {
8
16
  getBlobsDir,
@@ -59,7 +67,7 @@ export interface SessionMessageEntry extends SessionEntryBase {
59
67
 
60
68
  export interface ThinkingLevelChangeEntry extends SessionEntryBase {
61
69
  type: "thinking_level_change";
62
- thinkingLevel: string;
70
+ thinkingLevel?: string | null;
63
71
  }
64
72
 
65
73
  export interface ModelChangeEntry extends SessionEntryBase {
@@ -70,6 +78,11 @@ export interface ModelChangeEntry extends SessionEntryBase {
70
78
  role?: string;
71
79
  }
72
80
 
81
+ export interface ServiceTierChangeEntry extends SessionEntryBase {
82
+ type: "service_tier_change";
83
+ serviceTier: ServiceTier | null;
84
+ }
85
+
73
86
  export interface CompactionEntry<T = unknown> extends SessionEntryBase {
74
87
  type: "compaction";
75
88
  summary: string;
@@ -173,6 +186,7 @@ export type SessionEntry =
173
186
  | SessionMessageEntry
174
187
  | ThinkingLevelChangeEntry
175
188
  | ModelChangeEntry
189
+ | ServiceTierChangeEntry
176
190
  | CompactionEntry
177
191
  | BranchSummaryEntry
178
192
  | CustomEntry
@@ -195,7 +209,8 @@ export interface SessionTreeNode {
195
209
 
196
210
  export interface SessionContext {
197
211
  messages: AgentMessage[];
198
- thinkingLevel: string;
212
+ thinkingLevel?: string;
213
+ serviceTier?: ServiceTier;
199
214
  /** Model roles: { default: "provider/modelId", small: "provider/modelId", ... } */
200
215
  models: Record<string, string>;
201
216
  /** Names of TTSR rules that have been injected this session */
@@ -416,7 +431,14 @@ export function buildSessionContext(
416
431
  let leaf: SessionEntry | undefined;
417
432
  if (leafId === null) {
418
433
  // Explicitly null - return no messages (navigated to before first entry)
419
- return { messages: [], thinkingLevel: "off", models: {}, injectedTtsrRules: [], mode: "none" };
434
+ return {
435
+ messages: [],
436
+ thinkingLevel: "off",
437
+ serviceTier: undefined,
438
+ models: {},
439
+ injectedTtsrRules: [],
440
+ mode: "none",
441
+ };
420
442
  }
421
443
  if (leafId) {
422
444
  leaf = byId.get(leafId);
@@ -427,7 +449,14 @@ export function buildSessionContext(
427
449
  }
428
450
 
429
451
  if (!leaf) {
430
- return { messages: [], thinkingLevel: "off", models: {}, injectedTtsrRules: [], mode: "none" };
452
+ return {
453
+ messages: [],
454
+ thinkingLevel: "off",
455
+ serviceTier: undefined,
456
+ models: {},
457
+ injectedTtsrRules: [],
458
+ mode: "none",
459
+ };
431
460
  }
432
461
 
433
462
  // Walk from leaf to root, collecting path
@@ -439,7 +468,8 @@ export function buildSessionContext(
439
468
  }
440
469
 
441
470
  // Extract settings and find compaction
442
- let thinkingLevel = "off";
471
+ let thinkingLevel: string | undefined = "off";
472
+ let serviceTier: ServiceTier | undefined;
443
473
  const models: Record<string, string> = {};
444
474
  let compaction: CompactionEntry | null = null;
445
475
  const injectedTtsrRulesSet = new Set<string>();
@@ -448,13 +478,15 @@ export function buildSessionContext(
448
478
 
449
479
  for (const entry of path) {
450
480
  if (entry.type === "thinking_level_change") {
451
- thinkingLevel = entry.thinkingLevel;
481
+ thinkingLevel = entry.thinkingLevel ?? "off";
452
482
  } else if (entry.type === "model_change") {
453
483
  // New format: { model: "provider/id", role?: string }
454
484
  if (entry.model) {
455
485
  const role = entry.role ?? "default";
456
486
  models[role] = entry.model;
457
487
  }
488
+ } else if (entry.type === "service_tier_change") {
489
+ serviceTier = entry.serviceTier ?? undefined;
458
490
  } else if (entry.type === "message" && entry.message.role === "assistant") {
459
491
  // Infer default model from assistant messages
460
492
  models.default = `${entry.message.provider}/${entry.message.model}`;
@@ -500,6 +532,17 @@ export function buildSessionContext(
500
532
  };
501
533
 
502
534
  if (compaction) {
535
+ const remoteReplacementHistory = (() => {
536
+ const candidate = compaction.preserveData?.openaiRemoteCompaction;
537
+ if (!candidate || typeof candidate !== "object") return undefined;
538
+ const replacementHistory = (candidate as { replacementHistory?: unknown }).replacementHistory;
539
+ if (!Array.isArray(replacementHistory)) return undefined;
540
+ return replacementHistory as Array<Record<string, unknown>>;
541
+ })();
542
+ const providerPayload: ProviderPayload | undefined = remoteReplacementHistory
543
+ ? { type: "openaiResponsesHistory", items: remoteReplacementHistory }
544
+ : undefined;
545
+
503
546
  // Emit summary first
504
547
  messages.push(
505
548
  createCompactionSummaryMessage(
@@ -507,21 +550,24 @@ export function buildSessionContext(
507
550
  compaction.tokensBefore,
508
551
  compaction.timestamp,
509
552
  compaction.shortSummary,
553
+ providerPayload,
510
554
  ),
511
555
  );
512
556
 
513
557
  // Find compaction index in path
514
558
  const compactionIdx = path.findIndex(e => e.type === "compaction" && e.id === compaction.id);
515
559
 
516
- // Emit kept messages (before compaction, starting from firstKeptEntryId)
517
- let foundFirstKept = false;
518
- for (let i = 0; i < compactionIdx; i++) {
519
- const entry = path[i];
520
- if (entry.id === compaction.firstKeptEntryId) {
521
- foundFirstKept = true;
522
- }
523
- if (foundFirstKept) {
524
- appendMessage(entry);
560
+ if (!remoteReplacementHistory) {
561
+ // Emit kept messages (before compaction, starting from firstKeptEntryId)
562
+ let foundFirstKept = false;
563
+ for (let i = 0; i < compactionIdx; i++) {
564
+ const entry = path[i];
565
+ if (entry.id === compaction.firstKeptEntryId) {
566
+ foundFirstKept = true;
567
+ }
568
+ if (foundFirstKept) {
569
+ appendMessage(entry);
570
+ }
525
571
  }
526
572
  }
527
573
 
@@ -537,7 +583,7 @@ export function buildSessionContext(
537
583
  }
538
584
  }
539
585
 
540
- return { messages, thinkingLevel, models, injectedTtsrRules, mode, modeData };
586
+ return { messages, thinkingLevel, serviceTier, models, injectedTtsrRules, mode, modeData };
541
587
  }
542
588
 
543
589
  /**
@@ -1783,13 +1829,25 @@ export class SessionManager {
1783
1829
  }
1784
1830
 
1785
1831
  /** Append a thinking level change as child of current leaf, then advance leaf. Returns entry id. */
1786
- appendThinkingLevelChange(thinkingLevel: string): string {
1832
+ appendThinkingLevelChange(thinkingLevel?: string): string {
1787
1833
  const entry: ThinkingLevelChangeEntry = {
1788
1834
  type: "thinking_level_change",
1789
1835
  id: generateId(this.#byId),
1790
1836
  parentId: this.#leafId,
1791
1837
  timestamp: new Date().toISOString(),
1792
- thinkingLevel,
1838
+ thinkingLevel: thinkingLevel ?? null,
1839
+ };
1840
+ this.#appendEntry(entry);
1841
+ return entry.id;
1842
+ }
1843
+
1844
+ appendServiceTierChange(serviceTier: ServiceTier | null): string {
1845
+ const entry: ServiceTierChangeEntry = {
1846
+ type: "service_tier_change",
1847
+ id: generateId(this.#byId),
1848
+ parentId: this.#leafId,
1849
+ timestamp: new Date().toISOString(),
1850
+ serviceTier,
1793
1851
  };
1794
1852
  this.#appendEntry(entry);
1795
1853
  return entry.id;
@@ -3,6 +3,12 @@ import type { SettingPath, SettingValue } from "../config/settings";
3
3
  import { settings } from "../config/settings";
4
4
  import type { InteractiveModeContext } from "../modes/types";
5
5
 
6
+ function refreshStatusLine(ctx: InteractiveModeContext): void {
7
+ ctx.statusLine.invalidate();
8
+ ctx.updateEditorTopBorder();
9
+ ctx.ui.requestRender();
10
+ }
11
+
6
12
  /** Declarative subcommand definition for commands like /mcp. */
7
13
  export interface SubcommandDef {
8
14
  name: string;
@@ -92,6 +98,48 @@ const BUILTIN_SLASH_COMMAND_REGISTRY: ReadonlyArray<BuiltinSlashCommandSpec> = [
92
98
  runtime.ctx.editor.setText("");
93
99
  },
94
100
  },
101
+ {
102
+ name: "fast",
103
+ description: "Toggle fast mode (OpenAI service tier priority)",
104
+ subcommands: [
105
+ { name: "on", description: "Enable fast mode" },
106
+ { name: "off", description: "Disable fast mode" },
107
+ { name: "status", description: "Show fast mode status" },
108
+ ],
109
+ allowArgs: true,
110
+ handle: (command, runtime) => {
111
+ const arg = command.args.trim().toLowerCase();
112
+ if (!arg || arg === "toggle") {
113
+ const enabled = runtime.ctx.session.toggleFastMode();
114
+ refreshStatusLine(runtime.ctx);
115
+ runtime.ctx.showStatus(`Fast mode ${enabled ? "enabled" : "disabled"}.`);
116
+ runtime.ctx.editor.setText("");
117
+ return;
118
+ }
119
+ if (arg === "on") {
120
+ runtime.ctx.session.setFastMode(true);
121
+ refreshStatusLine(runtime.ctx);
122
+ runtime.ctx.showStatus("Fast mode enabled.");
123
+ runtime.ctx.editor.setText("");
124
+ return;
125
+ }
126
+ if (arg === "off") {
127
+ runtime.ctx.session.setFastMode(false);
128
+ refreshStatusLine(runtime.ctx);
129
+ runtime.ctx.showStatus("Fast mode disabled.");
130
+ runtime.ctx.editor.setText("");
131
+ return;
132
+ }
133
+ if (arg === "status") {
134
+ const enabled = runtime.ctx.session.isFastModeEnabled();
135
+ runtime.ctx.showStatus(`Fast mode is ${enabled ? "on" : "off"}.`);
136
+ runtime.ctx.editor.setText("");
137
+ return;
138
+ }
139
+ runtime.ctx.showStatus("Usage: /fast [on|off|status]");
140
+ runtime.ctx.editor.setText("");
141
+ },
142
+ },
95
143
  {
96
144
  name: "export",
97
145
  description: "Export session to HTML file",
@@ -3,6 +3,7 @@
3
3
  *
4
4
  * Agents are embedded at build time via Bun's import with { type: "text" }.
5
5
  */
6
+ import { Effort } from "@oh-my-pi/pi-ai";
6
7
  import { renderPromptTemplate } from "../config/prompt-templates";
7
8
  import { parseAgentFields } from "../discovery/helpers";
8
9
  import designerMd from "../prompts/agents/designer.md" with { type: "text" };
@@ -53,7 +54,7 @@ const EMBEDDED_AGENT_DEFS: EmbeddedAgentDef[] = [
53
54
  description: "General-purpose subagent with full capabilities for delegated multi-step tasks",
54
55
  spawns: "*",
55
56
  model: "default",
56
- thinkingLevel: "medium",
57
+ thinkingLevel: Effort.Medium,
57
58
  },
58
59
  template: taskMd,
59
60
  },
@@ -63,7 +64,7 @@ const EMBEDDED_AGENT_DEFS: EmbeddedAgentDef[] = [
63
64
  name: "quick_task",
64
65
  description: "Low-reasoning agent for strictly mechanical updates or data collection only",
65
66
  model: "pi/smol",
66
- thinkingLevel: "minimal",
67
+ thinkingLevel: Effort.Minimal,
67
68
  },
68
69
  template: taskMd,
69
70
  },
@@ -4,8 +4,8 @@
4
4
  * Runs each subagent on the main thread and forwards AgentEvents for progress tracking.
5
5
  */
6
6
  import path from "node:path";
7
- import type { AgentEvent } from "@oh-my-pi/pi-agent-core";
8
- import type { Api, Model, ThinkingLevel, ToolChoice } from "@oh-my-pi/pi-ai";
7
+ import type { AgentEvent, ThinkingLevel } from "@oh-my-pi/pi-agent-core";
8
+ import type { Api, Model, ToolChoice } from "@oh-my-pi/pi-ai";
9
9
  import { logger, untilAborted } from "@oh-my-pi/pi-utils";
10
10
  import type { TSchema } from "@sinclair/typebox";
11
11
  import Ajv, { type ValidateFunction } from "ajv";
package/src/task/types.ts CHANGED
@@ -1,4 +1,5 @@
1
- import type { ThinkingLevel, Usage } from "@oh-my-pi/pi-ai";
1
+ import type { ThinkingLevel } from "@oh-my-pi/pi-agent-core";
2
+ import type { Usage } from "@oh-my-pi/pi-ai";
2
3
  import { $env } from "@oh-my-pi/pi-utils";
3
4
  import { type Static, Type } from "@sinclair/typebox";
4
5
  import type { NestedRepoPatch } from "./worktree";
@@ -0,0 +1,87 @@
1
+ import { type ResolvedThinkingLevel, ThinkingLevel } from "@oh-my-pi/pi-agent-core";
2
+ import { clampThinkingLevelForModel, type Effort, type Model, THINKING_EFFORTS } from "@oh-my-pi/pi-ai";
3
+
4
+ /**
5
+ * Metadata used to render thinking selector values in the coding-agent UI.
6
+ */
7
+ export interface ThinkingLevelMetadata {
8
+ value: ThinkingLevel;
9
+ label: string;
10
+ description: string;
11
+ }
12
+
13
+ const THINKING_LEVEL_METADATA: Record<ThinkingLevel, ThinkingLevelMetadata> = {
14
+ [ThinkingLevel.Inherit]: {
15
+ value: ThinkingLevel.Inherit,
16
+ label: "inherit",
17
+ description: "Inherit session default",
18
+ },
19
+ [ThinkingLevel.Off]: { value: ThinkingLevel.Off, label: "off", description: "No reasoning" },
20
+ [ThinkingLevel.Minimal]: {
21
+ value: ThinkingLevel.Minimal,
22
+ label: "min",
23
+ description: "Very brief reasoning (~1k tokens)",
24
+ },
25
+ [ThinkingLevel.Low]: { value: ThinkingLevel.Low, label: "low", description: "Light reasoning (~2k tokens)" },
26
+ [ThinkingLevel.Medium]: {
27
+ value: ThinkingLevel.Medium,
28
+ label: "medium",
29
+ description: "Moderate reasoning (~8k tokens)",
30
+ },
31
+ [ThinkingLevel.High]: { value: ThinkingLevel.High, label: "high", description: "Deep reasoning (~16k tokens)" },
32
+ [ThinkingLevel.XHigh]: {
33
+ value: ThinkingLevel.XHigh,
34
+ label: "xhigh",
35
+ description: "Maximum reasoning (~32k tokens)",
36
+ },
37
+ };
38
+
39
+ const THINKING_LEVELS = new Set<string>([ThinkingLevel.Inherit, ThinkingLevel.Off, ...THINKING_EFFORTS]);
40
+ const EFFORT_LEVELS = new Set<string>(THINKING_EFFORTS);
41
+
42
+ /**
43
+ * Parses a provider-facing effort value.
44
+ */
45
+ export function parseEffort(value: string | null | undefined): Effort | undefined {
46
+ return value !== undefined && value !== null && EFFORT_LEVELS.has(value) ? (value as Effort) : undefined;
47
+ }
48
+
49
+ /**
50
+ * Parses an agent-local thinking selector.
51
+ */
52
+ export function parseThinkingLevel(value: string | null | undefined): ThinkingLevel | undefined {
53
+ return value !== undefined && value !== null && THINKING_LEVELS.has(value) ? (value as ThinkingLevel) : undefined;
54
+ }
55
+
56
+ /**
57
+ * Returns display metadata for a thinking selector.
58
+ */
59
+ export function getThinkingLevelMetadata(level: ThinkingLevel): ThinkingLevelMetadata {
60
+ return THINKING_LEVEL_METADATA[level];
61
+ }
62
+
63
+ /**
64
+ * Converts an agent-local selector into the effort sent to providers.
65
+ */
66
+ export function toReasoningEffort(level: ThinkingLevel | undefined): Effort | undefined {
67
+ if (level === undefined || level === ThinkingLevel.Off || level === ThinkingLevel.Inherit) {
68
+ return undefined;
69
+ }
70
+ return level;
71
+ }
72
+
73
+ /**
74
+ * Resolves a selector against the current model while preserving explicit "off".
75
+ */
76
+ export function resolveThinkingLevelForModel(
77
+ model: Model | undefined,
78
+ level: ThinkingLevel | undefined,
79
+ ): ResolvedThinkingLevel | undefined {
80
+ if (level === undefined || level === ThinkingLevel.Inherit) {
81
+ return undefined;
82
+ }
83
+ if (level === ThinkingLevel.Off) {
84
+ return ThinkingLevel.Off;
85
+ }
86
+ return clampThinkingLevelForModel(model, level);
87
+ }
@@ -517,15 +517,24 @@ export class BrowserTool implements AgentTool<typeof browserSchema, BrowserToolD
517
517
  }
518
518
  : DEFAULT_VIEWPORT;
519
519
  const puppeteer = await loadPuppeteer();
520
+ const launchArgs = [
521
+ "--no-sandbox",
522
+ "--disable-setuid-sandbox",
523
+ "--disable-blink-features=AutomationControlled",
524
+ `--window-size=${initialViewport.width},${initialViewport.height}`,
525
+ ];
526
+ const proxy = process.env.PUPPETEER_PROXY;
527
+ if (proxy) {
528
+ launchArgs.push(`--proxy-server=${proxy}`);
529
+ }
530
+ const ignoreCert = process.env.PUPPETEER_PROXY_IGNORE_CERT_ERRORS?.toLowerCase();
531
+ if (ignoreCert === "true" || ignoreCert === "1" || ignoreCert === "yes" || ignoreCert === "on") {
532
+ launchArgs.push("--ignore-certificate-errors");
533
+ }
520
534
  this.#browser = await puppeteer.launch({
521
535
  headless: this.#currentHeadless,
522
536
  defaultViewport: this.#currentHeadless ? initialViewport : null,
523
- args: [
524
- "--no-sandbox",
525
- "--disable-setuid-sandbox",
526
- "--disable-blink-features=AutomationControlled",
527
- `--window-size=${initialViewport.width},${initialViewport.height}`,
528
- ],
537
+ args: launchArgs,
529
538
  ignoreDefaultArgs: [...STEALTH_IGNORE_DEFAULT_ARGS],
530
539
  });
531
540
  this.#page = await this.#browser.newPage();