stratagem-x7 0.3.4 → 0.3.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/dist/cli.mjs +103 -29
  2. package/package.json +1 -1
package/dist/cli.mjs CHANGED
@@ -35467,7 +35467,7 @@ function resolveProfileFilePath(options) {
35467
35467
  return resolve3(options?.cwd ?? process.cwd(), PROFILE_FILE_NAME);
35468
35468
  }
35469
35469
  function isProviderProfile(value) {
35470
- return value === "openai" || value === "ollama" || value === "codex" || value === "gemini" || value === "atomic-chat" || value === "nvidia-nim" || value === "minimax" || value === "mistral";
35470
+ return value === "openai" || value === "ollama" || value === "codex" || value === "gemini" || value === "atomic-chat" || value === "nvidia-nim" || value === "minimax" || value === "mistral" || value === "opencode" || value === "opencode-go";
35471
35471
  }
35472
35472
  function buildOllamaProfileEnv(model, options) {
35473
35473
  return {
@@ -35756,6 +35756,25 @@ async function buildLaunchEnv(options) {
35756
35756
  delete env3.CODEX_ACCOUNT_ID;
35757
35757
  return env3;
35758
35758
  }
35759
+ if (options.profile === "opencode" || options.profile === "opencode-go") {
35760
+ const isGo = options.profile === "opencode-go";
35761
+ const defaultBaseUrl = isGo ? DEFAULT_OPENCODE_GO_BASE_URL : DEFAULT_OPENCODE_BASE_URL;
35762
+ const defaultModel = isGo ? DEFAULT_OPENCODE_GO_MODEL : DEFAULT_OPENCODE_MODEL;
35763
+ env3.OPENAI_BASE_URL = persistedOpenAIBaseUrl || defaultBaseUrl;
35764
+ env3.OPENAI_MODEL = persistedOpenAIModel || defaultModel;
35765
+ const shellKey = sanitizeApiKey(processEnv.OPENCODE_API_KEY);
35766
+ const persistedKey = sanitizeApiKey(persistedEnv.OPENCODE_API_KEY);
35767
+ const key = shellKey || persistedKey || sanitizeApiKey(processEnv.OPENAI_API_KEY) || sanitizeApiKey(persistedEnv.OPENAI_API_KEY);
35768
+ if (key) {
35769
+ env3.OPENAI_API_KEY = key;
35770
+ } else {
35771
+ delete env3.OPENAI_API_KEY;
35772
+ }
35773
+ delete env3.CODEX_API_KEY;
35774
+ delete env3.CHATGPT_ACCOUNT_ID;
35775
+ delete env3.CODEX_ACCOUNT_ID;
35776
+ return env3;
35777
+ }
35759
35778
  if (options.profile === "codex") {
35760
35779
  env3.OPENAI_BASE_URL = persistedOpenAIBaseUrl && isCodexBaseUrl(persistedOpenAIBaseUrl) ? persistedOpenAIBaseUrl : DEFAULT_CODEX_BASE_URL;
35761
35780
  env3.OPENAI_MODEL = persistedOpenAIModel || "codexplan";
@@ -35860,7 +35879,7 @@ async function applySavedProfileToCurrentSession(options) {
35860
35879
  applyProfileEnvToProcessEnv(processEnv, nextEnv);
35861
35880
  return null;
35862
35881
  }
35863
- var PROFILE_FILE_NAME = ".stratagem-profile.json", DEFAULT_GEMINI_BASE_URL = "https://generativelanguage.googleapis.com/v1beta/openai", DEFAULT_GEMINI_MODEL = "gemini-2.0-flash", DEFAULT_MISTRAL_BASE_URL2 = "https://api.mistral.ai/v1", DEFAULT_MISTRAL_MODEL = "devstral-latest", PROFILE_ENV_KEYS;
35882
+ var PROFILE_FILE_NAME = ".stratagem-profile.json", DEFAULT_GEMINI_BASE_URL = "https://generativelanguage.googleapis.com/v1beta/openai", DEFAULT_GEMINI_MODEL = "gemini-2.0-flash", DEFAULT_MISTRAL_BASE_URL2 = "https://api.mistral.ai/v1", DEFAULT_MISTRAL_MODEL = "devstral-latest", DEFAULT_OPENCODE_BASE_URL = "https://opencode.ai/zen/v1", DEFAULT_OPENCODE_GO_BASE_URL = "https://opencode.ai/zen/go/v1", DEFAULT_OPENCODE_MODEL = "gpt-5.4", DEFAULT_OPENCODE_GO_MODEL = "glm-5", PROFILE_ENV_KEYS;
35864
35883
  var init_providerProfile = __esm(() => {
35865
35884
  init_providerConfig();
35866
35885
  init_codexOAuthShared();
@@ -35898,7 +35917,8 @@ var init_providerProfile = __esm(() => {
35898
35917
  "MINIMAX_MODEL",
35899
35918
  "MISTRAL_BASE_URL",
35900
35919
  "MISTRAL_API_KEY",
35901
- "MISTRAL_MODEL"
35920
+ "MISTRAL_MODEL",
35921
+ "OPENCODE_API_KEY"
35902
35922
  ];
35903
35923
  });
35904
35924
 
@@ -109001,7 +109021,9 @@ var init_configs = __esm(() => {
109001
109021
  github: "github:copilot",
109002
109022
  codex: "gpt-5.4",
109003
109023
  "nvidia-nim": "nvidia/llama-3.1-nemotron-70b-instruct",
109004
- minimax: "MiniMax-M2.5"
109024
+ minimax: "MiniMax-M2.5",
109025
+ mistral: "devstral-latest",
109026
+ opencode: "gpt-5.4"
109005
109027
  };
109006
109028
  CLAUDE_3_5_V2_SONNET_CONFIG = {
109007
109029
  firstParty: "claude-3-5-sonnet-20241022",
@@ -109013,7 +109035,9 @@ var init_configs = __esm(() => {
109013
109035
  github: "github:copilot",
109014
109036
  codex: "gpt-5.4",
109015
109037
  "nvidia-nim": "nvidia/llama-3.1-nemotron-70b-instruct",
109016
- minimax: "MiniMax-M2.5"
109038
+ minimax: "MiniMax-M2.5",
109039
+ mistral: "devstral-latest",
109040
+ opencode: "gpt-5.4"
109017
109041
  };
109018
109042
  CLAUDE_3_5_HAIKU_CONFIG = {
109019
109043
  firstParty: "claude-3-5-haiku-20241022",
@@ -109025,7 +109049,9 @@ var init_configs = __esm(() => {
109025
109049
  github: "github:copilot",
109026
109050
  codex: "gpt-5.4",
109027
109051
  "nvidia-nim": "nvidia/llama-3.1-nemotron-70b-instruct",
109028
- minimax: "MiniMax-M2.5"
109052
+ minimax: "MiniMax-M2.5",
109053
+ mistral: "devstral-latest",
109054
+ opencode: "gpt-5.4"
109029
109055
  };
109030
109056
  CLAUDE_HAIKU_4_5_CONFIG = {
109031
109057
  firstParty: "claude-haiku-4-5-20251001",
@@ -109037,7 +109063,9 @@ var init_configs = __esm(() => {
109037
109063
  github: "github:copilot",
109038
109064
  codex: "gpt-5.4",
109039
109065
  "nvidia-nim": "nvidia/llama-3.1-nemotron-70b-instruct",
109040
- minimax: "MiniMax-M2.5"
109066
+ minimax: "MiniMax-M2.5",
109067
+ mistral: "devstral-latest",
109068
+ opencode: "gpt-5.4"
109041
109069
  };
109042
109070
  CLAUDE_SONNET_4_CONFIG = {
109043
109071
  firstParty: "claude-sonnet-4-20250514",
@@ -109049,7 +109077,9 @@ var init_configs = __esm(() => {
109049
109077
  github: "github:copilot",
109050
109078
  codex: "gpt-5.4",
109051
109079
  "nvidia-nim": "nvidia/llama-3.1-nemotron-70b-instruct",
109052
- minimax: "MiniMax-M2.5"
109080
+ minimax: "MiniMax-M2.5",
109081
+ mistral: "devstral-latest",
109082
+ opencode: "gpt-5.4"
109053
109083
  };
109054
109084
  CLAUDE_SONNET_4_5_CONFIG = {
109055
109085
  firstParty: "claude-sonnet-4-5-20250929",
@@ -109061,7 +109091,9 @@ var init_configs = __esm(() => {
109061
109091
  github: "github:copilot",
109062
109092
  codex: "gpt-5.4",
109063
109093
  "nvidia-nim": "nvidia/llama-3.1-nemotron-70b-instruct",
109064
- minimax: "MiniMax-M2.5"
109094
+ minimax: "MiniMax-M2.5",
109095
+ mistral: "devstral-latest",
109096
+ opencode: "gpt-5.4"
109065
109097
  };
109066
109098
  CLAUDE_OPUS_4_CONFIG = {
109067
109099
  firstParty: "claude-opus-4-20250514",
@@ -109073,7 +109105,9 @@ var init_configs = __esm(() => {
109073
109105
  github: "github:copilot",
109074
109106
  codex: "gpt-5.4",
109075
109107
  "nvidia-nim": "nvidia/llama-3.1-nemotron-70b-instruct",
109076
- minimax: "MiniMax-M2.5"
109108
+ minimax: "MiniMax-M2.5",
109109
+ mistral: "devstral-latest",
109110
+ opencode: "gpt-5.4"
109077
109111
  };
109078
109112
  CLAUDE_OPUS_4_1_CONFIG = {
109079
109113
  firstParty: "claude-opus-4-1-20250805",
@@ -109085,7 +109119,9 @@ var init_configs = __esm(() => {
109085
109119
  github: "github:copilot",
109086
109120
  codex: "gpt-5.4",
109087
109121
  "nvidia-nim": "nvidia/llama-3.1-nemotron-70b-instruct",
109088
- minimax: "MiniMax-M2.5"
109122
+ minimax: "MiniMax-M2.5",
109123
+ mistral: "devstral-latest",
109124
+ opencode: "gpt-5.4"
109089
109125
  };
109090
109126
  CLAUDE_OPUS_4_5_CONFIG = {
109091
109127
  firstParty: "claude-opus-4-5-20251101",
@@ -109097,7 +109133,9 @@ var init_configs = __esm(() => {
109097
109133
  github: "github:copilot",
109098
109134
  codex: "gpt-5.4",
109099
109135
  "nvidia-nim": "nvidia/llama-3.1-nemotron-70b-instruct",
109100
- minimax: "MiniMax-M2.5"
109136
+ minimax: "MiniMax-M2.5",
109137
+ mistral: "devstral-latest",
109138
+ opencode: "gpt-5.4"
109101
109139
  };
109102
109140
  CLAUDE_OPUS_4_6_CONFIG = {
109103
109141
  firstParty: "claude-opus-4-6",
@@ -109109,7 +109147,9 @@ var init_configs = __esm(() => {
109109
109147
  github: "github:copilot",
109110
109148
  codex: "gpt-5.4",
109111
109149
  "nvidia-nim": "nvidia/llama-3.1-nemotron-70b-instruct",
109112
- minimax: "MiniMax-M2.5"
109150
+ minimax: "MiniMax-M2.5",
109151
+ mistral: "devstral-latest",
109152
+ opencode: "gpt-5.4"
109113
109153
  };
109114
109154
  CLAUDE_SONNET_4_6_CONFIG = {
109115
109155
  firstParty: "claude-sonnet-4-6",
@@ -109121,7 +109161,9 @@ var init_configs = __esm(() => {
109121
109161
  github: "github:copilot",
109122
109162
  codex: "gpt-5.4",
109123
109163
  "nvidia-nim": "nvidia/llama-3.1-nemotron-70b-instruct",
109124
- minimax: "MiniMax-M2.5"
109164
+ minimax: "MiniMax-M2.5",
109165
+ mistral: "devstral-latest",
109166
+ opencode: "gpt-5.4"
109125
109167
  };
109126
109168
  ALL_MODEL_CONFIGS = {
109127
109169
  haiku35: CLAUDE_3_5_HAIKU_CONFIG,
@@ -109148,6 +109190,9 @@ function getAPIProvider() {
109148
109190
  if (isEnvTruthy(process.env.MINIMAX_API_KEY)) {
109149
109191
  return "minimax";
109150
109192
  }
109193
+ if (process.env.OPENAI_BASE_URL?.includes("opencode.ai")) {
109194
+ return "opencode";
109195
+ }
109151
109196
  return isEnvTruthy(process.env.CLAUDE_CODE_USE_GEMINI) ? "gemini" : isEnvTruthy(process.env.CLAUDE_CODE_USE_MISTRAL) ? "mistral" : isEnvTruthy(process.env.CLAUDE_CODE_USE_GITHUB) ? "github" : isEnvTruthy(process.env.CLAUDE_CODE_USE_OPENAI) ? isCodexModel() ? "codex" : "openai" : isEnvTruthy(process.env.CLAUDE_CODE_USE_BEDROCK) ? "bedrock" : isEnvTruthy(process.env.CLAUDE_CODE_USE_VERTEX) ? "vertex" : isEnvTruthy(process.env.CLAUDE_CODE_USE_FOUNDRY) ? "foundry" : "firstParty";
109152
109197
  }
109153
109198
  function usesAnthropicAccountFlow() {
@@ -110561,7 +110606,7 @@ function getSmallFastModel() {
110561
110606
  if (getAPIProvider() === "mistral") {
110562
110607
  return process.env.MISTRAL_MODEL || "ministral-3b-latest";
110563
110608
  }
110564
- if (getAPIProvider() === "openai") {
110609
+ if (getAPIProvider() === "openai" || getAPIProvider() === "opencode") {
110565
110610
  return process.env.OPENAI_MODEL || "gpt-4o-mini";
110566
110611
  }
110567
110612
  if (getAPIProvider() === "github") {
@@ -110581,7 +110626,7 @@ function getUserSpecifiedModelSetting() {
110581
110626
  const settings = getSettings_DEPRECATED() || {};
110582
110627
  const setting = normalizeModelSetting(settings.model);
110583
110628
  const provider = getAPIProvider();
110584
- specifiedModel = (provider === "gemini" ? process.env.GEMINI_MODEL : undefined) || (provider === "mistral" ? process.env.MISTRAL_MODEL : undefined) || (provider === "openai" || provider === "gemini" || provider === "mistral" || provider === "github" ? process.env.OPENAI_MODEL : undefined) || (provider === "firstParty" ? process.env.ANTHROPIC_MODEL : undefined) || setting || undefined;
110629
+ specifiedModel = (provider === "gemini" ? process.env.GEMINI_MODEL : undefined) || (provider === "mistral" ? process.env.MISTRAL_MODEL : undefined) || (provider === "openai" || provider === "gemini" || provider === "mistral" || provider === "github" || provider === "opencode" ? process.env.OPENAI_MODEL : undefined) || (provider === "firstParty" ? process.env.ANTHROPIC_MODEL : undefined) || setting || undefined;
110585
110630
  }
110586
110631
  if (specifiedModel && !isModelAllowed(specifiedModel)) {
110587
110632
  return;
@@ -110608,7 +110653,7 @@ function getDefaultOpusModel() {
110608
110653
  if (getAPIProvider() === "mistral") {
110609
110654
  return process.env.MISTRAL_MODEL || "devstral-latest";
110610
110655
  }
110611
- if (getAPIProvider() === "openai") {
110656
+ if (getAPIProvider() === "openai" || getAPIProvider() === "opencode") {
110612
110657
  return process.env.OPENAI_MODEL || "gpt-4o";
110613
110658
  }
110614
110659
  if (getAPIProvider() === "codex") {
@@ -110632,7 +110677,7 @@ function getDefaultSonnetModel() {
110632
110677
  if (getAPIProvider() === "mistral") {
110633
110678
  return process.env.MISTRAL_MODEL || "mistral-medium-latest";
110634
110679
  }
110635
- if (getAPIProvider() === "openai") {
110680
+ if (getAPIProvider() === "openai" || getAPIProvider() === "opencode") {
110636
110681
  return process.env.OPENAI_MODEL || "gpt-4o";
110637
110682
  }
110638
110683
  if (getAPIProvider() === "codex") {
@@ -110653,7 +110698,7 @@ function getDefaultHaikuModel() {
110653
110698
  if (getAPIProvider() === "mistral") {
110654
110699
  return process.env.MISTRAL_MODEL || "ministral-3b-latest";
110655
110700
  }
110656
- if (getAPIProvider() === "openai") {
110701
+ if (getAPIProvider() === "openai" || getAPIProvider() === "opencode") {
110657
110702
  return process.env.OPENAI_MODEL || "gpt-4o-mini";
110658
110703
  }
110659
110704
  if (getAPIProvider() === "codex") {
@@ -110688,7 +110733,7 @@ function getDefaultMainLoopModelSetting() {
110688
110733
  if (getAPIProvider() === "mistral") {
110689
110734
  return process.env.MISTRAL_MODEL || "devstral-latest";
110690
110735
  }
110691
- if (getAPIProvider() === "openai") {
110736
+ if (getAPIProvider() === "openai" || getAPIProvider() === "opencode") {
110692
110737
  return process.env.OPENAI_MODEL || "gpt-4o";
110693
110738
  }
110694
110739
  if (getAPIProvider() === "codex") {
@@ -110808,7 +110853,7 @@ function renderModelSetting(setting) {
110808
110853
  return renderModelName(setting);
110809
110854
  }
110810
110855
  function getPublicModelDisplayName(model) {
110811
- if (getAPIProvider() === "openai" || getAPIProvider() === "gemini" || getAPIProvider() === "codex" || getAPIProvider() === "github") {
110856
+ if (getAPIProvider() === "openai" || getAPIProvider() === "gemini" || getAPIProvider() === "codex" || getAPIProvider() === "github" || getAPIProvider() === "opencode") {
110812
110857
  const copilotModelNames = {
110813
110858
  "gpt-5.4": "GPT-5.4",
110814
110859
  "gpt-5.4-mini": "GPT-5.4 mini",
@@ -113484,6 +113529,24 @@ function getProviderPresetDefaults(preset) {
113484
113529
  apiKey: "",
113485
113530
  requiresApiKey: true
113486
113531
  };
113532
+ case "opencode":
113533
+ return {
113534
+ provider: "openai",
113535
+ name: "OpenCode Zen",
113536
+ baseUrl: "https://opencode.ai/zen/v1",
113537
+ model: "gpt-5.4",
113538
+ apiKey: process.env.OPENCODE_API_KEY ?? "",
113539
+ requiresApiKey: true
113540
+ };
113541
+ case "opencode-go":
113542
+ return {
113543
+ provider: "openai",
113544
+ name: "OpenCode Go",
113545
+ baseUrl: "https://opencode.ai/zen/go/v1",
113546
+ model: "glm-5",
113547
+ apiKey: process.env.OPENCODE_API_KEY ?? "",
113548
+ requiresApiKey: true
113549
+ };
113487
113550
  case "ollama":
113488
113551
  default:
113489
113552
  return {
@@ -113552,6 +113615,7 @@ function clearProviderProfileEnvFromProcessEnv(processEnv = process.env) {
113552
113615
  delete processEnv.MINIMAX_API_KEY;
113553
113616
  delete processEnv.NVIDIA_API_KEY;
113554
113617
  delete processEnv.NVIDIA_NIM;
113618
+ delete processEnv.OPENCODE_API_KEY;
113555
113619
  }
113556
113620
  function applyProviderProfileToProcessEnv(profile) {
113557
113621
  clearProviderProfileEnvFromProcessEnv();
@@ -281635,6 +281699,16 @@ function ProviderManager({ mode, onDone }) {
281635
281699
  label: "Ollama Cloud",
281636
281700
  description: "Ollama Cloud API — just needs API key"
281637
281701
  },
281702
+ {
281703
+ value: "opencode",
281704
+ label: "OpenCode Zen",
281705
+ description: "OpenCode Zen proxy — premium multi-provider API"
281706
+ },
281707
+ {
281708
+ value: "opencode-go",
281709
+ label: "OpenCode Go",
281710
+ description: "OpenCode Go proxy — lightweight multi-provider API"
281711
+ },
281638
281712
  ...mode === "first-run" ? [
281639
281713
  {
281640
281714
  value: "skip",
@@ -382714,7 +382788,7 @@ function getAnthropicEnvMetadata() {
382714
382788
  function getBuildAgeMinutes() {
382715
382789
  if (false)
382716
382790
  ;
382717
- const buildTime = new Date("2026-04-24T07:53:54.383Z").getTime();
382791
+ const buildTime = new Date("2026-04-24T10:50:02.487Z").getTime();
382718
382792
  if (isNaN(buildTime))
382719
382793
  return;
382720
382794
  return Math.floor((Date.now() - buildTime) / 60000);
@@ -409893,7 +409967,7 @@ function buildPrimarySection() {
409893
409967
  }, undefined, false, undefined, this);
409894
409968
  return [{
409895
409969
  label: "Version",
409896
- value: "0.3.4"
409970
+ value: "0.3.5"
409897
409971
  }, {
409898
409972
  label: "Session name",
409899
409973
  value: nameValue
@@ -449521,7 +449595,7 @@ function getStartupLines(termWidth) {
449521
449595
  const sLen = ` ● ${sL} buffer ready — /help for breach controls`.length;
449522
449596
  out.push(centerAnsiLine(boxRow(sRow, W2, sLen), tw));
449523
449597
  out.push(centerAnsiLine(`${rgb3(...BORDER)}└${"─".repeat(W2 - 2)}┘${RESET2}`, tw));
449524
- out.push(centerAnsiLine(`${rgb3(...DIMCOL)}STRATAGEM X7${RESET2} ${rgb3(...ACCENT)}v${"0.3.4"}${RESET2} ${rgb3(...CYAN)}// breach link stable${RESET2}`, tw));
449598
+ out.push(centerAnsiLine(`${rgb3(...DIMCOL)}STRATAGEM X7${RESET2} ${rgb3(...ACCENT)}v${"0.3.5"}${RESET2} ${rgb3(...CYAN)}// breach link stable${RESET2}`, tw));
449525
449599
  out.push("");
449526
449600
  return out;
449527
449601
  }
@@ -478070,7 +478144,7 @@ var init_bridge_kick = __esm(() => {
478070
478144
  var call60 = async () => {
478071
478145
  return {
478072
478146
  type: "text",
478073
- value: `${"99.0.0"} (built ${"2026-04-24T07:53:54.383Z"})`
478147
+ value: `${"99.0.0"} (built ${"2026-04-24T10:50:02.487Z"})`
478074
478148
  };
478075
478149
  }, version2, version_default;
478076
478150
  var init_version = __esm(() => {
@@ -553461,7 +553535,7 @@ function WelcomeV2() {
553461
553535
  dimColor: true,
553462
553536
  children: [
553463
553537
  "v",
553464
- "0.3.4",
553538
+ "0.3.5",
553465
553539
  " "
553466
553540
  ]
553467
553541
  }, undefined, true, undefined, this)
@@ -573479,7 +573553,7 @@ Usage: stx7 --remote "your task description"`, () => gracefulShutdown(1));
573479
573553
  pendingHookMessages
573480
573554
  }, renderAndRun);
573481
573555
  }
573482
- }).version("0.3.4 (STRATAGEM X7)", "-v, --version", "Output the version number");
573556
+ }).version("0.3.5 (STRATAGEM X7)", "-v, --version", "Output the version number");
573483
573557
  program2.option("-w, --worktree [name]", "Create a new git worktree for this session (optionally specify a name)");
573484
573558
  program2.option("--tmux", "Create a tmux session for the worktree (requires --worktree). Uses iTerm2 native panes when available; use --tmux=classic for traditional tmux.");
573485
573559
  if (canUserConfigureAdvisor()) {
@@ -574008,7 +574082,7 @@ if (false) {}
574008
574082
  async function main2() {
574009
574083
  const args = process.argv.slice(2);
574010
574084
  if (args.length === 1 && (args[0] === "--version" || args[0] === "-v" || args[0] === "-V")) {
574011
- console.log(`${"0.3.4"} (STRATAGEM X7)`);
574085
+ console.log(`${"0.3.5"} (STRATAGEM X7)`);
574012
574086
  return;
574013
574087
  }
574014
574088
  if (args.includes("--provider")) {
@@ -574130,4 +574204,4 @@ async function main2() {
574130
574204
  }
574131
574205
  main2();
574132
574206
 
574133
- //# debugId=1704741C7842E25964756E2164756E21
574207
+ //# debugId=652CAA3C920DAF9864756E2164756E21
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "stratagem-x7",
3
- "version": "0.3.4",
3
+ "version": "0.3.5",
4
4
  "description": "STRATAGEM X7 is a cyberpunk coding-agent CLI for cloud and local model providers",
5
5
  "type": "module",
6
6
  "bin": {