osagent 0.2.74 → 0.2.75

This diff shows the changes between publicly released versions of this package as they appear in their public registry, and is provided for informational purposes only.
Files changed (2)
  1. package/cli.js +23 -12
  2. package/package.json +1 -1
package/cli.js CHANGED
@@ -133087,7 +133087,7 @@ var init_geminiContentGenerator = __esm({
  }
  models;
  constructor(contentGeneratorConfig, config2) {
- const version3 = "0.2.74";
+ const version3 = "0.2.75";
  const userAgent2 = `OSAgent/${version3} (${process.platform}; ${process.arch})`;
  let headers = {
  "User-Agent": userAgent2
@@ -143450,6 +143450,7 @@ var init_pipeline = __esm({
  init_esbuild_shims();
  init_node();
  init_converter2();
+ init_uiTelemetry();
  ContentGenerationPipeline = class {
  static {
  __name(this, "ContentGenerationPipeline");
@@ -143470,6 +143471,9 @@ var init_pipeline = __esm({
  signal: request4.config?.abortSignal
  });
  const OSAResponse = this.converter.convertOpenAIResponseToOSA(openaiResponse);
+ if (OSAResponse.usageMetadata?.promptTokenCount !== void 0) {
+ uiTelemetryService.setLastPromptTokenCount(OSAResponse.usageMetadata.promptTokenCount);
+ }
  await this.config.telemetryService.logSuccess(context2, OSAResponse, openaiRequest, openaiResponse);
  return OSAResponse;
  });
@@ -143495,7 +143499,7 @@ var init_pipeline = __esm({
  const collectedOSAResponses = [];
  const collectedOpenAIChunks = [];
  this.converter.resetStreamingToolCalls();
- let pendingFinishResponse = null;
+ const pendingState = { response: null };
  try {
  for await (const chunk of stream2) {
  collectedOpenAIChunks.push(chunk);
@@ -143504,19 +143508,26 @@ var init_pipeline = __esm({
  continue;
  }
  const shouldYield = this.handleChunkMerging(response, collectedOSAResponses, (mergedResponse) => {
- pendingFinishResponse = mergedResponse;
+ pendingState.response = mergedResponse;
  });
  if (shouldYield) {
- if (pendingFinishResponse) {
- yield pendingFinishResponse;
- pendingFinishResponse = null;
+ const responseToYield = pendingState.response || response;
+ if (responseToYield.usageMetadata?.promptTokenCount !== void 0) {
+ uiTelemetryService.setLastPromptTokenCount(responseToYield.usageMetadata.promptTokenCount);
+ }
+ if (pendingState.response) {
+ yield pendingState.response;
+ pendingState.response = null;
  } else {
  yield response;
  }
  }
  }
- if (pendingFinishResponse) {
- yield pendingFinishResponse;
+ if (pendingState.response) {
+ if (pendingState.response.usageMetadata?.promptTokenCount !== void 0) {
+ uiTelemetryService.setLastPromptTokenCount(pendingState.response.usageMetadata.promptTokenCount);
+ }
+ yield pendingState.response;
  }
  if (this.converter.hasPendingToolCalls()) {
  const incompletePartsList = this.converter.finalizeIncompleteToolCalls();
@@ -152996,7 +153007,7 @@ function createContentGeneratorConfig(config2, authType, generationConfig) {
  };
  }
  async function createContentGenerator(config2, gcConfig, sessionId2, isInitialAuth) {
- const version3 = "0.2.74";
+ const version3 = "0.2.75";
  const userAgent2 = `OSAgent/${version3} (${process.platform}; ${process.arch})`;
  const baseHeaders = {
  "User-Agent": userAgent2
@@ -340481,7 +340492,7 @@ __name(getPackageJson, "getPackageJson");
  // packages/cli/src/utils/version.ts
  async function getCliVersion() {
  const pkgJson = await getPackageJson();
- return "0.2.74";
+ return "0.2.75";
  }
  __name(getCliVersion, "getCliVersion");
 
@@ -344682,8 +344693,8 @@ var formatDuration = /* @__PURE__ */ __name((milliseconds) => {
 
  // packages/cli/src/generated/git-commit.ts
  init_esbuild_shims();
- var GIT_COMMIT_INFO2 = "5db4b6a";
- var CLI_VERSION2 = "0.2.74";
+ var GIT_COMMIT_INFO2 = "e2c7312";
+ var CLI_VERSION2 = "0.2.75";
 
  // packages/cli/src/utils/systemInfo.ts
  async function getNpmVersion() {
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "osagent",
- "version": "0.2.74",
+ "version": "0.2.75",
  "description": "OS Agent - AI-powered CLI for autonomous coding with Ollama Cloud and Qwen models",
  "repository": {
  "type": "git",