mpd-llm-cli 0.1.35 → 0.1.37

This diff covers publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between those versions as they appear in the public registry.
package/bundle/api.cjs CHANGED
@@ -27594,12 +27594,19 @@ var init_custom_llm = __esm({
   temperature = Number(process.env.CUSTOM_LLM_TEMPERATURE || 0);
   maxTokens = Number(process.env.CUSTOM_LLM_MAX_TOKENS || 8192);
   topP = Number(process.env.CUSTOM_LLM_TOP_P || 1);
-  config = {
-    model: this.modelName,
-    temperature: this.temperature,
-    max_tokens: this.maxTokens,
-    top_p: this.topP
-  };
+  getConfig() {
+    const config2 = {
+      model: this.modelName,
+      max_tokens: this.maxTokens
+    };
+    if (this.temperature !== 0) {
+      config2.temperature = this.temperature;
+    }
+    if (this.topP !== 0) {
+      config2.top_p = this.topP;
+    }
+    return config2;
+  }
   constructor() {
     this.model = new OpenAI({
       apiKey: this.apiKey,
@@ -27625,7 +27632,7 @@ var init_custom_llm = __esm({
       stream: true,
       tools,
       stream_options: { include_usage: true },
-      ...this.config
+      ...this.getConfig()
     });
     const map2 = /* @__PURE__ */ new Map();
     return async function* () {
@@ -27651,7 +27658,7 @@ var init_custom_llm = __esm({
     const completion = await this.model.chat.completions.create({
       messages,
       stream: false,
-      ...this.config
+      ...this.getConfig()
     });
     return ModelConverter.toGeminiResponse(completion);
   }
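The hunks above replace the eagerly built config object with a getConfig() method that always sends model and max_tokens but includes temperature and top_p only when they are non-zero, and spread its result into each chat.completions.create call; the same change is repeated in api.js and gemini.js below. A minimal TypeScript sketch of that pattern is shown here for reference: the environment variables and field names come from the bundled code, while the CustomLlmSettings class and CompletionConfig interface are illustrative only, not the package's source.

    interface CompletionConfig {
      model: string;
      max_tokens: number;
      temperature?: number;
      top_p?: number;
    }

    class CustomLlmSettings {
      modelName = process.env.CUSTOM_LLM_MODEL_NAME ?? "gemini";
      temperature = Number(process.env.CUSTOM_LLM_TEMPERATURE || 0);
      maxTokens = Number(process.env.CUSTOM_LLM_MAX_TOKENS || 8192);
      topP = Number(process.env.CUSTOM_LLM_TOP_P || 1);

      // Always send model and max_tokens; omit zero-valued sampling
      // parameters so the provider's own defaults apply instead.
      getConfig(): CompletionConfig {
        const config: CompletionConfig = {
          model: this.modelName,
          max_tokens: this.maxTokens
        };
        if (this.temperature !== 0) config.temperature = this.temperature;
        if (this.topP !== 0) config.top_p = this.topP;
        return config;
      }
    }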
package/bundle/api.js CHANGED
@@ -27598,12 +27598,19 @@ var init_custom_llm = __esm({
   temperature = Number(process.env.CUSTOM_LLM_TEMPERATURE || 0);
   maxTokens = Number(process.env.CUSTOM_LLM_MAX_TOKENS || 8192);
   topP = Number(process.env.CUSTOM_LLM_TOP_P || 1);
-  config = {
-    model: this.modelName,
-    temperature: this.temperature,
-    max_tokens: this.maxTokens,
-    top_p: this.topP
-  };
+  getConfig() {
+    const config2 = {
+      model: this.modelName,
+      max_tokens: this.maxTokens
+    };
+    if (this.temperature !== 0) {
+      config2.temperature = this.temperature;
+    }
+    if (this.topP !== 0) {
+      config2.top_p = this.topP;
+    }
+    return config2;
+  }
   constructor() {
     this.model = new OpenAI({
       apiKey: this.apiKey,
@@ -27629,7 +27636,7 @@ var init_custom_llm = __esm({
       stream: true,
       tools,
       stream_options: { include_usage: true },
-      ...this.config
+      ...this.getConfig()
     });
     const map2 = /* @__PURE__ */ new Map();
     return async function* () {
@@ -27655,7 +27662,7 @@ var init_custom_llm = __esm({
     const completion = await this.model.chat.completions.create({
       messages,
       stream: false,
-      ...this.config
+      ...this.getConfig()
     });
     return ModelConverter.toGeminiResponse(completion);
   }
@@ -27717,7 +27724,7 @@ async function createContentGeneratorConfig(model, authType) {
   return contentGeneratorConfig;
 }
 async function createContentGenerator(config2, sessionId2) {
-  const version2 = "0.1.35";
+  const version2 = "0.1.37";
   const httpOptions = {
     headers: {
       "User-Agent": `GeminiCLI/${version2} (${process.platform}; ${process.arch})`
@@ -166405,7 +166412,7 @@ async function getPackageJson() {
 // packages/cli/src/utils/version.ts
 async function getCliVersion() {
   const pkgJson = await getPackageJson();
-  return "0.1.35";
+  return "0.1.37";
 }
 
 // packages/cli/src/config/sandboxConfig.ts
package/bundle/gemini.js CHANGED
@@ -62225,12 +62225,19 @@ var init_custom_llm = __esm({
   temperature = Number(process.env.CUSTOM_LLM_TEMPERATURE || 0);
   maxTokens = Number(process.env.CUSTOM_LLM_MAX_TOKENS || 8192);
   topP = Number(process.env.CUSTOM_LLM_TOP_P || 1);
-  config = {
-    model: this.modelName,
-    temperature: this.temperature,
-    max_tokens: this.maxTokens,
-    top_p: this.topP
-  };
+  getConfig() {
+    const config2 = {
+      model: this.modelName,
+      max_tokens: this.maxTokens
+    };
+    if (this.temperature !== 0) {
+      config2.temperature = this.temperature;
+    }
+    if (this.topP !== 0) {
+      config2.top_p = this.topP;
+    }
+    return config2;
+  }
   constructor() {
     this.model = new OpenAI({
       apiKey: this.apiKey,
@@ -62256,7 +62263,7 @@ var init_custom_llm = __esm({
       stream: true,
       tools,
       stream_options: { include_usage: true },
-      ...this.config
+      ...this.getConfig()
     });
     const map2 = /* @__PURE__ */ new Map();
     return async function* () {
@@ -62282,7 +62289,7 @@ var init_custom_llm = __esm({
     const completion = await this.model.chat.completions.create({
       messages,
       stream: false,
-      ...this.config
+      ...this.getConfig()
     });
     return ModelConverter.toGeminiResponse(completion);
   }
@@ -62344,7 +62351,7 @@ async function createContentGeneratorConfig(model, authType) {
   return contentGeneratorConfig;
 }
 async function createContentGenerator(config2, sessionId2) {
-  const version3 = "0.1.35";
+  const version3 = "0.1.37";
   const httpOptions = {
     headers: {
       "User-Agent": `GeminiCLI/${version3} (${process.platform}; ${process.arch})`
@@ -199404,7 +199411,7 @@ var init_langfuseClient = __esm({
       userId,
       metadata: {
         ...safeMetadata,
-        cli_version: this.safeString("0.1.35", "unknown"),
+        cli_version: this.safeString("0.1.37", "unknown"),
         model: this.safeString(process.env.CUSTOM_LLM_MODEL_NAME, "gemini"),
         auth_type: process.env.USE_CUSTOM_LLM ? "custom_llm" : "google_oauth",
         environment: this.safeString(this.configManager.getConfig()?.environment, "unknown")
@@ -200571,7 +200578,7 @@ var init_langfuseIntegration = __esm({
       const metadata = {
         model: this.config.getModel(),
         auth_type: this.config.getContentGeneratorConfig()?.authType,
-        cli_version: "0.1.35",
+        cli_version: "0.1.37",
         start_time: (/* @__PURE__ */ new Date()).toISOString(),
         session_id: this.sessionId
       };
@@ -200630,7 +200637,7 @@ var init_langfuseIntegration = __esm({
         totalCachedTokens: sessionStats.totalCachedTokens,
         totalPromptTokens: sessionStats.totalPromptTokens,
         metadata: {
-          cli_version: "0.1.35",
+          cli_version: "0.1.37",
           auth_type: this.config.getContentGeneratorConfig()?.authType,
           session_end_time: (/* @__PURE__ */ new Date()).toISOString()
         }
@@ -200702,7 +200709,7 @@ var init_langfuseIntegration = __esm({
         error,
         metadata: {
           session_id: this.sessionId,
-          cli_version: "0.1.35",
+          cli_version: "0.1.37",
           auth_type: this.config.getContentGeneratorConfig()?.authType
         }
       });
@@ -274465,465 +274472,6 @@ var require_static = __commonJS({
274465
274472
  }
274466
274473
  });
274467
274474
 
274468
- // node_modules/tiktoken/tiktoken_bg.cjs
274469
- var require_tiktoken_bg = __commonJS({
274470
- "node_modules/tiktoken/tiktoken_bg.cjs"(exports2, module2) {
274471
- var wasm2;
274472
- module2.exports.__wbg_set_wasm = function(val) {
274473
- wasm2 = val;
274474
- };
274475
- var lTextDecoder = typeof TextDecoder === "undefined" ? (0, module2.require)("util").TextDecoder : TextDecoder;
274476
- var cachedTextDecoder = new lTextDecoder("utf-8", { ignoreBOM: true, fatal: true });
274477
- cachedTextDecoder.decode();
274478
- var cachedUint8ArrayMemory0 = null;
274479
- function getUint8ArrayMemory0() {
274480
- if (cachedUint8ArrayMemory0 === null || cachedUint8ArrayMemory0.byteLength === 0) {
274481
- cachedUint8ArrayMemory0 = new Uint8Array(wasm2.memory.buffer);
274482
- }
274483
- return cachedUint8ArrayMemory0;
274484
- }
274485
- function getStringFromWasm0(ptr, len) {
274486
- ptr = ptr >>> 0;
274487
- return cachedTextDecoder.decode(getUint8ArrayMemory0().subarray(ptr, ptr + len));
274488
- }
274489
- var heap = new Array(128).fill(void 0);
274490
- heap.push(void 0, null, true, false);
274491
- var heap_next = heap.length;
274492
- function addHeapObject(obj) {
274493
- if (heap_next === heap.length) heap.push(heap.length + 1);
274494
- const idx = heap_next;
274495
- heap_next = heap[idx];
274496
- heap[idx] = obj;
274497
- return idx;
274498
- }
274499
- function handleError(f, args) {
274500
- try {
274501
- return f.apply(this, args);
274502
- } catch (e2) {
274503
- wasm2.__wbindgen_export_0(addHeapObject(e2));
274504
- }
274505
- }
274506
- function getObject2(idx) {
274507
- return heap[idx];
274508
- }
274509
- function dropObject(idx) {
274510
- if (idx < 132) return;
274511
- heap[idx] = heap_next;
274512
- heap_next = idx;
274513
- }
274514
- function takeObject(idx) {
274515
- const ret = getObject2(idx);
274516
- dropObject(idx);
274517
- return ret;
274518
- }
274519
- var WASM_VECTOR_LEN = 0;
274520
- var lTextEncoder = typeof TextEncoder === "undefined" ? (0, module2.require)("util").TextEncoder : TextEncoder;
274521
- var cachedTextEncoder = new lTextEncoder("utf-8");
274522
- var encodeString = typeof cachedTextEncoder.encodeInto === "function" ? function(arg, view) {
274523
- return cachedTextEncoder.encodeInto(arg, view);
274524
- } : function(arg, view) {
274525
- const buf = cachedTextEncoder.encode(arg);
274526
- view.set(buf);
274527
- return {
274528
- read: arg.length,
274529
- written: buf.length
274530
- };
274531
- };
274532
- function passStringToWasm0(arg, malloc, realloc) {
274533
- if (realloc === void 0) {
274534
- const buf = cachedTextEncoder.encode(arg);
274535
- const ptr2 = malloc(buf.length, 1) >>> 0;
274536
- getUint8ArrayMemory0().subarray(ptr2, ptr2 + buf.length).set(buf);
274537
- WASM_VECTOR_LEN = buf.length;
274538
- return ptr2;
274539
- }
274540
- let len = arg.length;
274541
- let ptr = malloc(len, 1) >>> 0;
274542
- const mem = getUint8ArrayMemory0();
274543
- let offset = 0;
274544
- for (; offset < len; offset++) {
274545
- const code = arg.charCodeAt(offset);
274546
- if (code > 127) break;
274547
- mem[ptr + offset] = code;
274548
- }
274549
- if (offset !== len) {
274550
- if (offset !== 0) {
274551
- arg = arg.slice(offset);
274552
- }
274553
- ptr = realloc(ptr, len, len = offset + arg.length * 3, 1) >>> 0;
274554
- const view = getUint8ArrayMemory0().subarray(ptr + offset, ptr + len);
274555
- const ret = encodeString(arg, view);
274556
- offset += ret.written;
274557
- ptr = realloc(ptr, len, offset, 1) >>> 0;
274558
- }
274559
- WASM_VECTOR_LEN = offset;
274560
- return ptr;
274561
- }
274562
- function isLikeNone(x) {
274563
- return x === void 0 || x === null;
274564
- }
274565
- var cachedDataViewMemory0 = null;
274566
- function getDataViewMemory0() {
274567
- if (cachedDataViewMemory0 === null || cachedDataViewMemory0.buffer.detached === true || cachedDataViewMemory0.buffer.detached === void 0 && cachedDataViewMemory0.buffer !== wasm2.memory.buffer) {
274568
- cachedDataViewMemory0 = new DataView(wasm2.memory.buffer);
274569
- }
274570
- return cachedDataViewMemory0;
274571
- }
274572
- var cachedUint32ArrayMemory0 = null;
274573
- function getUint32ArrayMemory0() {
274574
- if (cachedUint32ArrayMemory0 === null || cachedUint32ArrayMemory0.byteLength === 0) {
274575
- cachedUint32ArrayMemory0 = new Uint32Array(wasm2.memory.buffer);
274576
- }
274577
- return cachedUint32ArrayMemory0;
274578
- }
274579
- function getArrayU32FromWasm0(ptr, len) {
274580
- ptr = ptr >>> 0;
274581
- return getUint32ArrayMemory0().subarray(ptr / 4, ptr / 4 + len);
274582
- }
274583
- function passArray8ToWasm0(arg, malloc) {
274584
- const ptr = malloc(arg.length * 1, 1) >>> 0;
274585
- getUint8ArrayMemory0().set(arg, ptr / 1);
274586
- WASM_VECTOR_LEN = arg.length;
274587
- return ptr;
274588
- }
274589
- function passArray32ToWasm0(arg, malloc) {
274590
- const ptr = malloc(arg.length * 4, 4) >>> 0;
274591
- getUint32ArrayMemory0().set(arg, ptr / 4);
274592
- WASM_VECTOR_LEN = arg.length;
274593
- return ptr;
274594
- }
274595
- function getArrayU8FromWasm0(ptr, len) {
274596
- ptr = ptr >>> 0;
274597
- return getUint8ArrayMemory0().subarray(ptr / 1, ptr / 1 + len);
274598
- }
274599
- module2.exports.get_encoding = function(encoding, extend_special_tokens) {
274600
- if (wasm2 == null) throw new Error("tiktoken: WASM binary has not been propery initialized.");
274601
- try {
274602
- const retptr = wasm2.__wbindgen_add_to_stack_pointer(-16);
274603
- const ptr0 = passStringToWasm0(encoding, wasm2.__wbindgen_export_1, wasm2.__wbindgen_export_2);
274604
- const len0 = WASM_VECTOR_LEN;
274605
- wasm2.get_encoding(retptr, ptr0, len0, addHeapObject(extend_special_tokens));
274606
- var r0 = getDataViewMemory0().getInt32(retptr + 4 * 0, true);
274607
- var r1 = getDataViewMemory0().getInt32(retptr + 4 * 1, true);
274608
- var r22 = getDataViewMemory0().getInt32(retptr + 4 * 2, true);
274609
- if (r22) {
274610
- throw takeObject(r1);
274611
- }
274612
- return Tiktoken.__wrap(r0);
274613
- } finally {
274614
- wasm2.__wbindgen_add_to_stack_pointer(16);
274615
- }
274616
- };
274617
- module2.exports.encoding_for_model = function(model, extend_special_tokens) {
274618
- if (wasm2 == null) throw new Error("tiktoken: WASM binary has not been propery initialized.");
274619
- try {
274620
- const retptr = wasm2.__wbindgen_add_to_stack_pointer(-16);
274621
- const ptr0 = passStringToWasm0(model, wasm2.__wbindgen_export_1, wasm2.__wbindgen_export_2);
274622
- const len0 = WASM_VECTOR_LEN;
274623
- wasm2.encoding_for_model(retptr, ptr0, len0, addHeapObject(extend_special_tokens));
274624
- var r0 = getDataViewMemory0().getInt32(retptr + 4 * 0, true);
274625
- var r1 = getDataViewMemory0().getInt32(retptr + 4 * 1, true);
274626
- var r22 = getDataViewMemory0().getInt32(retptr + 4 * 2, true);
274627
- if (r22) {
274628
- throw takeObject(r1);
274629
- }
274630
- return Tiktoken.__wrap(r0);
274631
- } finally {
274632
- wasm2.__wbindgen_add_to_stack_pointer(16);
274633
- }
274634
- };
274635
- module2.exports.get_encoding_name_for_model = function(model) {
274636
- if (wasm2 == null) throw new Error("tiktoken: WASM binary has not been propery initialized.");
274637
- let deferred3_0;
274638
- let deferred3_1;
274639
- try {
274640
- const retptr = wasm2.__wbindgen_add_to_stack_pointer(-16);
274641
- const ptr0 = passStringToWasm0(model, wasm2.__wbindgen_export_1, wasm2.__wbindgen_export_2);
274642
- const len0 = WASM_VECTOR_LEN;
274643
- wasm2.get_encoding_name_for_model(retptr, ptr0, len0);
274644
- var r0 = getDataViewMemory0().getInt32(retptr + 4 * 0, true);
274645
- var r1 = getDataViewMemory0().getInt32(retptr + 4 * 1, true);
274646
- var r22 = getDataViewMemory0().getInt32(retptr + 4 * 2, true);
274647
- var r3 = getDataViewMemory0().getInt32(retptr + 4 * 3, true);
274648
- var ptr2 = r0;
274649
- var len2 = r1;
274650
- if (r3) {
274651
- ptr2 = 0;
274652
- len2 = 0;
274653
- throw takeObject(r22);
274654
- }
274655
- deferred3_0 = ptr2;
274656
- deferred3_1 = len2;
274657
- return getStringFromWasm0(ptr2, len2);
274658
- } finally {
274659
- wasm2.__wbindgen_add_to_stack_pointer(16);
274660
- wasm2.__wbindgen_export_3(deferred3_0, deferred3_1, 1);
274661
- }
274662
- };
274663
- var TiktokenFinalization = typeof FinalizationRegistry === "undefined" ? { register: () => {
274664
- }, unregister: () => {
274665
- } } : new FinalizationRegistry((ptr) => wasm2.__wbg_tiktoken_free(ptr >>> 0, 1));
274666
- var Tiktoken = class _Tiktoken {
274667
- /**
274668
- * @param {string} tiktoken_bfe
274669
- * @param {any} special_tokens
274670
- * @param {string} pat_str
274671
- */
274672
- constructor(tiktoken_bfe, special_tokens, pat_str) {
274673
- if (wasm2 == null) throw new Error("tiktoken: WASM binary has not been propery initialized.");
274674
- const ptr0 = passStringToWasm0(tiktoken_bfe, wasm2.__wbindgen_export_1, wasm2.__wbindgen_export_2);
274675
- const len0 = WASM_VECTOR_LEN;
274676
- const ptr1 = passStringToWasm0(pat_str, wasm2.__wbindgen_export_1, wasm2.__wbindgen_export_2);
274677
- const len1 = WASM_VECTOR_LEN;
274678
- const ret = wasm2.tiktoken_new(ptr0, len0, addHeapObject(special_tokens), ptr1, len1);
274679
- this.__wbg_ptr = ret >>> 0;
274680
- TiktokenFinalization.register(this, this.__wbg_ptr, this);
274681
- return this;
274682
- }
274683
- /** @returns {string | undefined} */
274684
- get name() {
274685
- try {
274686
- const retptr = wasm2.__wbindgen_add_to_stack_pointer(-16);
274687
- wasm2.tiktoken_name(retptr, this.__wbg_ptr);
274688
- var r0 = getDataViewMemory0().getInt32(retptr + 4 * 0, true);
274689
- var r1 = getDataViewMemory0().getInt32(retptr + 4 * 1, true);
274690
- let v12;
274691
- if (r0 !== 0) {
274692
- v12 = getStringFromWasm0(r0, r1).slice();
274693
- wasm2.__wbindgen_export_3(r0, r1 * 1, 1);
274694
- }
274695
- return v12;
274696
- } finally {
274697
- wasm2.__wbindgen_add_to_stack_pointer(16);
274698
- }
274699
- }
274700
- static __wrap(ptr) {
274701
- ptr = ptr >>> 0;
274702
- const obj = Object.create(_Tiktoken.prototype);
274703
- obj.__wbg_ptr = ptr;
274704
- TiktokenFinalization.register(obj, obj.__wbg_ptr, obj);
274705
- return obj;
274706
- }
274707
- __destroy_into_raw() {
274708
- const ptr = this.__wbg_ptr;
274709
- this.__wbg_ptr = 0;
274710
- TiktokenFinalization.unregister(this);
274711
- return ptr;
274712
- }
274713
- free() {
274714
- if (wasm2 == null) throw new Error("tiktoken: WASM binary has not been propery initialized.");
274715
- const ptr = this.__destroy_into_raw();
274716
- wasm2.__wbg_tiktoken_free(ptr, 0);
274717
- }
274718
- /**
274719
- * @param {string} text
274720
- * @param {any} allowed_special
274721
- * @param {any} disallowed_special
274722
- * @returns {Uint32Array}
274723
- */
274724
- encode(text, allowed_special, disallowed_special) {
274725
- if (wasm2 == null) throw new Error("tiktoken: WASM binary has not been propery initialized.");
274726
- try {
274727
- const retptr = wasm2.__wbindgen_add_to_stack_pointer(-16);
274728
- const ptr0 = passStringToWasm0(text, wasm2.__wbindgen_export_1, wasm2.__wbindgen_export_2);
274729
- const len0 = WASM_VECTOR_LEN;
274730
- wasm2.tiktoken_encode(retptr, this.__wbg_ptr, ptr0, len0, addHeapObject(allowed_special), addHeapObject(disallowed_special));
274731
- var r0 = getDataViewMemory0().getInt32(retptr + 4 * 0, true);
274732
- var r1 = getDataViewMemory0().getInt32(retptr + 4 * 1, true);
274733
- var r22 = getDataViewMemory0().getInt32(retptr + 4 * 2, true);
274734
- var r3 = getDataViewMemory0().getInt32(retptr + 4 * 3, true);
274735
- if (r3) {
274736
- throw takeObject(r22);
274737
- }
274738
- var v2 = getArrayU32FromWasm0(r0, r1).slice();
274739
- wasm2.__wbindgen_export_3(r0, r1 * 4, 4);
274740
- return v2;
274741
- } finally {
274742
- wasm2.__wbindgen_add_to_stack_pointer(16);
274743
- }
274744
- }
274745
- /**
274746
- * @param {string} text
274747
- * @returns {Uint32Array}
274748
- */
274749
- encode_ordinary(text) {
274750
- if (wasm2 == null) throw new Error("tiktoken: WASM binary has not been propery initialized.");
274751
- try {
274752
- const retptr = wasm2.__wbindgen_add_to_stack_pointer(-16);
274753
- const ptr0 = passStringToWasm0(text, wasm2.__wbindgen_export_1, wasm2.__wbindgen_export_2);
274754
- const len0 = WASM_VECTOR_LEN;
274755
- wasm2.tiktoken_encode_ordinary(retptr, this.__wbg_ptr, ptr0, len0);
274756
- var r0 = getDataViewMemory0().getInt32(retptr + 4 * 0, true);
274757
- var r1 = getDataViewMemory0().getInt32(retptr + 4 * 1, true);
274758
- var v2 = getArrayU32FromWasm0(r0, r1).slice();
274759
- wasm2.__wbindgen_export_3(r0, r1 * 4, 4);
274760
- return v2;
274761
- } finally {
274762
- wasm2.__wbindgen_add_to_stack_pointer(16);
274763
- }
274764
- }
274765
- /**
274766
- * @param {string} text
274767
- * @param {any} allowed_special
274768
- * @param {any} disallowed_special
274769
- * @returns {any}
274770
- */
274771
- encode_with_unstable(text, allowed_special, disallowed_special) {
274772
- if (wasm2 == null) throw new Error("tiktoken: WASM binary has not been propery initialized.");
274773
- try {
274774
- const retptr = wasm2.__wbindgen_add_to_stack_pointer(-16);
274775
- const ptr0 = passStringToWasm0(text, wasm2.__wbindgen_export_1, wasm2.__wbindgen_export_2);
274776
- const len0 = WASM_VECTOR_LEN;
274777
- wasm2.tiktoken_encode_with_unstable(retptr, this.__wbg_ptr, ptr0, len0, addHeapObject(allowed_special), addHeapObject(disallowed_special));
274778
- var r0 = getDataViewMemory0().getInt32(retptr + 4 * 0, true);
274779
- var r1 = getDataViewMemory0().getInt32(retptr + 4 * 1, true);
274780
- var r22 = getDataViewMemory0().getInt32(retptr + 4 * 2, true);
274781
- if (r22) {
274782
- throw takeObject(r1);
274783
- }
274784
- return takeObject(r0);
274785
- } finally {
274786
- wasm2.__wbindgen_add_to_stack_pointer(16);
274787
- }
274788
- }
274789
- /**
274790
- * @param {Uint8Array} bytes
274791
- * @returns {number}
274792
- */
274793
- encode_single_token(bytes) {
274794
- if (wasm2 == null) throw new Error("tiktoken: WASM binary has not been propery initialized.");
274795
- const ptr0 = passArray8ToWasm0(bytes, wasm2.__wbindgen_export_1);
274796
- const len0 = WASM_VECTOR_LEN;
274797
- const ret = wasm2.tiktoken_encode_single_token(this.__wbg_ptr, ptr0, len0);
274798
- return ret >>> 0;
274799
- }
274800
- /**
274801
- * @param {Uint32Array} tokens
274802
- * @returns {Uint8Array}
274803
- */
274804
- decode(tokens) {
274805
- if (wasm2 == null) throw new Error("tiktoken: WASM binary has not been propery initialized.");
274806
- try {
274807
- const retptr = wasm2.__wbindgen_add_to_stack_pointer(-16);
274808
- const ptr0 = passArray32ToWasm0(tokens, wasm2.__wbindgen_export_1);
274809
- const len0 = WASM_VECTOR_LEN;
274810
- wasm2.tiktoken_decode(retptr, this.__wbg_ptr, ptr0, len0);
274811
- var r0 = getDataViewMemory0().getInt32(retptr + 4 * 0, true);
274812
- var r1 = getDataViewMemory0().getInt32(retptr + 4 * 1, true);
274813
- var v2 = getArrayU8FromWasm0(r0, r1).slice();
274814
- wasm2.__wbindgen_export_3(r0, r1 * 1, 1);
274815
- return v2;
274816
- } finally {
274817
- wasm2.__wbindgen_add_to_stack_pointer(16);
274818
- }
274819
- }
274820
- /**
274821
- * @param {number} token
274822
- * @returns {Uint8Array}
274823
- */
274824
- decode_single_token_bytes(token2) {
274825
- if (wasm2 == null) throw new Error("tiktoken: WASM binary has not been propery initialized.");
274826
- try {
274827
- const retptr = wasm2.__wbindgen_add_to_stack_pointer(-16);
274828
- wasm2.tiktoken_decode_single_token_bytes(retptr, this.__wbg_ptr, token2);
274829
- var r0 = getDataViewMemory0().getInt32(retptr + 4 * 0, true);
274830
- var r1 = getDataViewMemory0().getInt32(retptr + 4 * 1, true);
274831
- var v12 = getArrayU8FromWasm0(r0, r1).slice();
274832
- wasm2.__wbindgen_export_3(r0, r1 * 1, 1);
274833
- return v12;
274834
- } finally {
274835
- wasm2.__wbindgen_add_to_stack_pointer(16);
274836
- }
274837
- }
274838
- /** @returns {any} */
274839
- token_byte_values() {
274840
- if (wasm2 == null) throw new Error("tiktoken: WASM binary has not been propery initialized.");
274841
- const ret = wasm2.tiktoken_token_byte_values(this.__wbg_ptr);
274842
- return takeObject(ret);
274843
- }
274844
- };
274845
- module2.exports.Tiktoken = Tiktoken;
274846
- module2.exports.__wbg_parse_def2e24ef1252aff = function() {
274847
- return handleError(function(arg0, arg1) {
274848
- const ret = JSON.parse(getStringFromWasm0(arg0, arg1));
274849
- return addHeapObject(ret);
274850
- }, arguments);
274851
- };
274852
- module2.exports.__wbg_stringify_f7ed6987935b4a24 = function() {
274853
- return handleError(function(arg0) {
274854
- const ret = JSON.stringify(getObject2(arg0));
274855
- return addHeapObject(ret);
274856
- }, arguments);
274857
- };
274858
- module2.exports.__wbindgen_error_new = function(arg0, arg1) {
274859
- const ret = new Error(getStringFromWasm0(arg0, arg1));
274860
- return addHeapObject(ret);
274861
- };
274862
- module2.exports.__wbindgen_is_undefined = function(arg0) {
274863
- const ret = getObject2(arg0) === void 0;
274864
- return ret;
274865
- };
274866
- module2.exports.__wbindgen_object_drop_ref = function(arg0) {
274867
- takeObject(arg0);
274868
- };
274869
- module2.exports.__wbindgen_string_get = function(arg0, arg1) {
274870
- if (wasm2 == null) throw new Error("tiktoken: WASM binary has not been propery initialized.");
274871
- const obj = getObject2(arg1);
274872
- const ret = typeof obj === "string" ? obj : void 0;
274873
- var ptr1 = isLikeNone(ret) ? 0 : passStringToWasm0(ret, wasm2.__wbindgen_export_1, wasm2.__wbindgen_export_2);
274874
- var len1 = WASM_VECTOR_LEN;
274875
- getDataViewMemory0().setInt32(arg0 + 4 * 1, len1, true);
274876
- getDataViewMemory0().setInt32(arg0 + 4 * 0, ptr1, true);
274877
- };
274878
- module2.exports.__wbindgen_throw = function(arg0, arg1) {
274879
- throw new Error(getStringFromWasm0(arg0, arg1));
274880
- };
274881
- }
274882
- });
274883
-
274884
- // node_modules/tiktoken/tiktoken.cjs
274885
- var require_tiktoken = __commonJS({
274886
- "node_modules/tiktoken/tiktoken.cjs"(exports2) {
274887
- var wasm2 = require_tiktoken_bg();
274888
- var imports = {};
274889
- imports["./tiktoken_bg.js"] = wasm2;
274890
- var path56 = __require("path");
274891
- var fs54 = __require("fs");
274892
- var candidates = __dirname.split(path56.sep).reduce((memo, _, index, array) => {
274893
- const prefix = array.slice(0, index + 1).join(path56.sep) + path56.sep;
274894
- if (!prefix.includes("node_modules" + path56.sep)) {
274895
- memo.unshift(
274896
- path56.join(
274897
- prefix,
274898
- "node_modules",
274899
- "tiktoken",
274900
- "",
274901
- "./tiktoken_bg.wasm"
274902
- )
274903
- );
274904
- }
274905
- return memo;
274906
- }, []);
274907
- candidates.unshift(path56.join(__dirname, "./tiktoken_bg.wasm"));
274908
- var bytes = null;
274909
- for (const candidate of candidates) {
274910
- try {
274911
- bytes = fs54.readFileSync(candidate);
274912
- break;
274913
- } catch {
274914
- }
274915
- }
274916
- if (bytes == null) throw new Error("Missing tiktoken_bg.wasm");
274917
- var wasmModule = new WebAssembly.Module(bytes);
274918
- var wasmInstance = new WebAssembly.Instance(wasmModule, imports);
274919
- wasm2.__wbg_set_wasm(wasmInstance.exports);
274920
- exports2["get_encoding"] = wasm2["get_encoding"];
274921
- exports2["encoding_for_model"] = wasm2["encoding_for_model"];
274922
- exports2["get_encoding_name_for_model"] = wasm2["get_encoding_name_for_model"];
274923
- exports2["Tiktoken"] = wasm2["Tiktoken"];
274924
- }
274925
- });
274926
-
274927
274475
  // packages/router/dist/src/utils/cache.js
274928
274476
  var LRUCache2, sessionUsageCache;
274929
274477
  var init_cache = __esm({
@@ -276375,18 +275923,18 @@ var init_esm15 = __esm({
 });
 
 // packages/router/dist/src/utils/router.js
+import { get_encoding } from "tiktoken";
 import { readFile as readFile5, access as access3 } from "fs/promises";
 import { opendir, stat as stat3 } from "fs/promises";
 import { join as join18 } from "path";
-var import_tiktoken, enc, calculateTokenCount, readConfigFile2, getProjectSpecificRouter, getUseModel, router, sessionProjectCache, searchProjectBySession;
+var enc, calculateTokenCount, readConfigFile2, getProjectSpecificRouter, getUseModel, router, sessionProjectCache, searchProjectBySession;
 var init_router = __esm({
   "packages/router/dist/src/utils/router.js"() {
     "use strict";
-    import_tiktoken = __toESM(require_tiktoken(), 1);
     init_cache();
     init_constants2();
     init_esm15();
-    enc = (0, import_tiktoken.get_encoding)("cl100k_base");
+    enc = get_encoding("cl100k_base");
     calculateTokenCount = (messages, system, tools) => {
       let tokenCount = 0;
       if (Array.isArray(messages)) {
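This hunk switches the router to a direct import of get_encoding from the tiktoken package (now declared as a dependency in package.json below) instead of the bundled copy of node_modules/tiktoken removed earlier in this file. As a rough sketch of how such an encoder is typically used for token counting (the countTokens helper and the message shape are illustrative assumptions, not the package's calculateTokenCount):

    import { get_encoding } from "tiktoken";

    // cl100k_base is the encoding the bundle requests above.
    const enc = get_encoding("cl100k_base");

    function countTokens(messages: Array<{ content: string }>): number {
      let total = 0;
      for (const message of messages) {
        total += enc.encode(message.content).length;
      }
      return total;
    }

    // The encoder is WASM-backed; free it once it is no longer needed.
    // enc.free();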
@@ -276583,7 +276131,7 @@ var require_package7 = __commonJS({
   "packages/router/package.json"(exports2, module2) {
     module2.exports = {
       name: "@mpdai/router",
-      version: "0.1.1",
+      version: "0.1.2",
       description: "MPD AI Router - Route Claude Code to other LLM providers",
       type: "module",
       main: "dist/index.js",
@@ -310566,7 +310114,7 @@ import { promises as fs36 } from "fs";
 import path40 from "path";
 
 // packages/cli/src/generated/git-commit.ts
-var GIT_COMMIT_INFO = "cc090d6 (local modifications)";
+var GIT_COMMIT_INFO = "fe41993 (local modifications)";
 
 // node_modules/read-package-up/index.js
 import path35 from "node:path";
@@ -310779,7 +310327,7 @@ async function getPackageJson() {
 // packages/cli/src/utils/version.ts
 async function getCliVersion() {
   const pkgJson = await getPackageJson();
-  return "0.1.35";
+  return "0.1.37";
 }
 
 // packages/cli/src/ui/commands/memoryCommand.ts
@@ -311043,45 +310591,6 @@ async function waitForService(timeout2 = 1e4, initialDelay = 1e3) {
   }
   return false;
 }
-async function handleRouterStart() {
-  await run();
-}
-async function handleRouterStop() {
-  try {
-    const pid = parseInt(readFileSync16(PID_FILE, "utf-8"));
-    process.kill(pid);
-    cleanupPidFile();
-    if (existsSync17(REFERENCE_COUNT_FILE)) {
-      try {
-        unlinkSync2(REFERENCE_COUNT_FILE);
-      } catch (e2) {
-      }
-    }
-    console.log("MPD AI router service has been successfully stopped.");
-  } catch (e2) {
-    console.log("Failed to stop the service. It may have already been stopped.");
-    cleanupPidFile();
-  }
-}
-async function handleRouterRestart() {
-  try {
-    const pid = parseInt(readFileSync16(PID_FILE, "utf-8"));
-    process.kill(pid);
-    cleanupPidFile();
-    if (existsSync17(REFERENCE_COUNT_FILE)) {
-      try {
-        unlinkSync2(REFERENCE_COUNT_FILE);
-      } catch (e2) {
-      }
-    }
-    console.log("MPD AI router service has been stopped.");
-  } catch (e2) {
-    console.log("Service was not running or failed to stop.");
-    cleanupPidFile();
-  }
-  console.log("Starting MPD AI router service...");
-  await run();
-}
 async function handleRouterStatus() {
   await showStatus();
 }
@@ -311102,34 +310611,10 @@ async function handleRouterCode(args) {
     executeCodeCommand(codeArgs);
   }
 }
-async function handleRouterModel() {
-  await runModelSelector();
-}
 var routerCommand = {
   name: "router",
   description: "Router commands for managing the MPD AI router service",
   subCommands: [
-    {
-      name: "start",
-      description: "Start the router service",
-      action: async (_context, _args) => {
-        await handleRouterStart();
-      }
-    },
-    {
-      name: "stop",
-      description: "Stop the router service",
-      action: async (_context, _args) => {
-        await handleRouterStop();
-      }
-    },
-    {
-      name: "restart",
-      description: "Restart the router service",
-      action: async (_context, _args) => {
-        await handleRouterRestart();
-      }
-    },
     {
       name: "status",
       description: "Show router service status",
@@ -311143,13 +310628,6 @@ var routerCommand = {
       action: async (_context, args) => {
         await handleRouterCode(args);
       }
-    },
-    {
-      name: "model",
-      description: "Interactive model selection and configuration",
-      action: async (_context, _args) => {
-        await handleRouterModel();
-      }
     }
   ]
 };
@@ -332118,7 +331596,7 @@ var DataCollector = class {
   // 提取元数据 (extract metadata)
   extractMetadata(data) {
     return {
-      cli_version: "0.1.35",
+      cli_version: "0.1.37",
       model: process.env.CUSTOM_LLM_MODEL_NAME || "gemini",
       auth_type: process.env.USE_CUSTOM_LLM ? "custom_llm" : "google_oauth",
       project_path: data.projectPath,
@@ -339573,17 +339051,12 @@ async function handleRouterCommand(command, args) {
 Usage: mpdai router [command]
 
 Commands:
-  start     Start server
-  stop      Stop server
-  restart   Restart server
   status    Show server status
   code      Execute claude command
-  model     Interactive model selection and configuration
 
 Example:
-  mpdai router start
+  mpdai router status
   mpdai router code "Write a Hello World"
-  mpdai router model
 `);
   return true;
 }
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "mpd-llm-cli",
-  "version": "0.1.35",
+  "version": "0.1.37",
   "engines": {
     "node": ">=20.0.0"
   },
@@ -13,7 +13,7 @@
     "url": "git+https://git.rakuten-it.com/scm/mpd-ai/mpd-llm-cli.git"
   },
   "config": {
-    "sandboxImageUri": "xx:0.1.31"
+    "sandboxImageUri": "xx:0.1.37"
   },
   "scripts": {
     "start": "node scripts/start.js",
@@ -93,6 +93,7 @@
     "yargs": "^17.7.2"
   },
   "dependencies": {
-    "@google/genai": "^1.9.0"
+    "@google/genai": "^1.9.0",
+    "tiktoken": "^1.0.21"
   }
 }