oh-my-opencode-kikokikok 2.15.9 → 2.15.11

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/cli/index.js CHANGED
@@ -2253,7 +2253,7 @@ var require_picocolors = __commonJS((exports, module) => {
  var require_package = __commonJS((exports, module) => {
  module.exports = {
  name: "oh-my-opencode-kikokikok",
- version: "2.15.9",
+ version: "2.15.11",
  description: "OpenCode plugin - custom agents (oracle, librarian) and enhanced features",
  main: "dist/index.js",
  types: "dist/index.d.ts",
@@ -7,7 +7,21 @@ export declare class LettaAdapter {
  private modelCachePromise;
  private resolvedEmbeddingModel;
  private resolvedLlmModel;
+ private providerInitPromise;
  constructor(config: LettaConfig);
+ /**
+ * Ensures the openai-proxy provider is registered in Letta when:
+ * 1. Models with openai-proxy/ prefix exist (from OPENAI_API_BASE env var)
+ * 2. No user-created openai-proxy provider exists
+ * 3. Copilot proxy is available locally
+ *
+ * This fixes the provider mismatch where Letta's built-in OpenAI provider
+ * creates models with openai-proxy/ handles but the provider is named "openai",
+ * causing agent creation to fail.
+ */
+ ensureOpenAIProxyProvider(): Promise<void>;
+ private doEnsureOpenAIProxyProvider;
+ private isCopilotProxyAvailable;
  private getModels;
  /**
  * Resolves user model name to Letta handle. Letta requires exact handle matches
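The doc comment above spells out when the new method registers the openai-proxy provider. As a usage sketch only (it assumes LettaAdapter is importable from the built package and that an endpoint-only LettaConfig is acceptable; both details are illustrative):

  const adapter = new LettaAdapter({ endpoint: "http://localhost:8283" });
  // Idempotent: repeated calls reuse the in-flight providerInitPromise.
  await adapter.ensureOpenAIProxyProvider();

In practice the adapter appears to trigger this itself after a successful availability check (see the dist/index.js hunk at 44063 below), so callers should rarely need to invoke it directly.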
package/dist/index.js CHANGED
@@ -43783,7 +43783,15 @@ var DEFAULT_ENDPOINT2 = "http://localhost:8283";
  var DEFAULT_AGENT_PREFIX = "opencode";
  var DEFAULT_LLM_MODEL = "letta/letta-free";
  var DEFAULT_EMBEDDING_MODEL = "letta/letta-free";
- var VALID_PROVIDERS = ["letta", "openai"];
+ var VALID_PROVIDERS = ["letta", "openai", "openai-proxy"];
+ var COPILOT_PROXY_ENDPOINT = "http://host.docker.internal:4141/v1";
+ var COPILOT_PROXY_LOCAL = "http://localhost:4141/v1";
+ function normalizeHandleForLetta(handle) {
+ if (handle.startsWith("openai-proxy/")) {
+ return handle.replace("openai-proxy/", "openai/");
+ }
+ return handle;
+ }

  class LettaAdapter {
  config;
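For reference, the new helper only swaps the provider prefix; any other handle passes through untouched. Illustrative inputs and outputs (the model names here are made up):

  normalizeHandleForLetta("openai-proxy/gpt-4o")  // -> "openai/gpt-4o"
  normalizeHandleForLetta("letta/letta-free")     // -> "letta/letta-free" (unchanged)

The hunks further down route every resolved embedding and LLM handle through this helper before storing it; per the doc comment in the type declarations above, this addresses the mismatch between openai-proxy/ model handles and Letta's provider being named "openai".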
@@ -43793,10 +43801,67 @@ class LettaAdapter {
  modelCachePromise = null;
  resolvedEmbeddingModel = null;
  resolvedLlmModel = null;
+ providerInitPromise = null;
  constructor(config3) {
  this.config = config3;
  this.endpoint = config3.endpoint ?? DEFAULT_ENDPOINT2;
  }
+ async ensureOpenAIProxyProvider() {
+ if (this.providerInitPromise) {
+ return this.providerInitPromise;
+ }
+ this.providerInitPromise = this.doEnsureOpenAIProxyProvider();
+ return this.providerInitPromise;
+ }
+ async doEnsureOpenAIProxyProvider() {
+ try {
+ const models = await this.getModels();
+ const hasOpenAIProxyModels = models.some((m) => m.handle.startsWith("openai-proxy/"));
+ if (!hasOpenAIProxyModels) {
+ return;
+ }
+ const providersResponse = await fetch(`${this.endpoint}/v1/providers/`, {
+ method: "GET",
+ redirect: "follow",
+ signal: AbortSignal.timeout(1e4)
+ });
+ if (!providersResponse.ok) {
+ return;
+ }
+ const providers = await providersResponse.json();
+ const hasOpenAIProxyProvider = providers.some((p) => p.name === "openai-proxy");
+ if (hasOpenAIProxyProvider) {
+ return;
+ }
+ const proxyAvailable = await this.isCopilotProxyAvailable();
+ if (!proxyAvailable) {
+ return;
+ }
+ await fetch(`${this.endpoint}/v1/providers/`, {
+ method: "POST",
+ headers: { "Content-Type": "application/json" },
+ redirect: "follow",
+ signal: AbortSignal.timeout(1e4),
+ body: JSON.stringify({
+ name: "openai-proxy",
+ provider_type: "openai",
+ api_key: "dummy",
+ base_url: COPILOT_PROXY_ENDPOINT
+ })
+ });
+ } catch {}
+ }
+ async isCopilotProxyAvailable() {
+ try {
+ const response2 = await fetch(`${COPILOT_PROXY_LOCAL}/models`, {
+ method: "GET",
+ signal: AbortSignal.timeout(5000)
+ });
+ return response2.ok;
+ } catch {
+ return false;
+ }
+ }
  async getModels() {
  if (this.modelCache) {
  return this.modelCache;
@@ -43866,7 +43931,8 @@ class LettaAdapter {
  }
  const configModel = this.config.embeddingModel;
  if (configModel) {
- this.resolvedEmbeddingModel = await this.resolveModelHandle(configModel, true);
+ const resolved = await this.resolveModelHandle(configModel, true);
+ this.resolvedEmbeddingModel = normalizeHandleForLetta(resolved);
  } else {
  const models = await this.getModels();
  const hasValidProvider = (handle) => {
@@ -43877,7 +43943,8 @@ class LettaAdapter {
  if (validEmbeddingModels.length > 0) {
  const preferredName = this.config.preferredEmbeddingModel ?? "text-embedding-3-small";
  const preferred = validEmbeddingModels.find((m) => m.name.includes(preferredName));
- this.resolvedEmbeddingModel = preferred?.handle ?? validEmbeddingModels[0].handle;
+ const handle = preferred?.handle ?? validEmbeddingModels[0].handle;
+ this.resolvedEmbeddingModel = normalizeHandleForLetta(handle);
  } else {
  this.resolvedEmbeddingModel = DEFAULT_EMBEDDING_MODEL;
  }
@@ -43890,7 +43957,8 @@ class LettaAdapter {
  }
  const configModel = this.config.llmModel;
  if (configModel) {
- this.resolvedLlmModel = await this.resolveModelHandle(configModel, false);
+ const resolved = await this.resolveModelHandle(configModel, false);
+ this.resolvedLlmModel = normalizeHandleForLetta(resolved);
  } else {
  this.resolvedLlmModel = DEFAULT_LLM_MODEL;
  }
@@ -44063,7 +44131,11 @@ class LettaAdapter {
  redirect: "follow",
  signal: AbortSignal.timeout(5000)
  });
- return response2.ok;
+ if (response2.ok) {
+ await this.ensureOpenAIProxyProvider();
+ return true;
+ }
+ return false;
  } catch {
  return false;
  }
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "oh-my-opencode-kikokikok",
- "version": "2.15.9",
+ "version": "2.15.11",
  "description": "OpenCode plugin - custom agents (oracle, librarian) and enhanced features",
  "main": "dist/index.js",
  "types": "dist/index.d.ts",