@polka-codes/cli 0.7.9 → 0.7.10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.js +56 -56
- package/package.json +1 -1
package/dist/index.js
CHANGED
|
@@ -24629,7 +24629,62 @@ var {
|
|
|
24629
24629
|
Help
|
|
24630
24630
|
} = import__.default;
|
|
24631
24631
|
// package.json
|
|
24632
|
-
var version = "0.7.9";
|
|
24632
|
+
var version = "0.7.10";
|
|
24633
|
+
|
|
24634
|
+
// ../core/src/AiService/AiServiceBase.ts
class AiServiceBase {
  // Shared usage accounting for all concrete AI service implementations.
  usageMeter;

  /**
   * @param {object} usageMeter - Usage tracker; must expose
   *   incrementMessageCount() and addUsage(usage, model).
   */
  constructor(usageMeter) {
    this.usageMeter = usageMeter;
  }

  /**
   * Streams provider chunks, recording usage chunks into the meter as they
   * pass through. Subclasses supply sendImpl(systemPrompt, messages) (an
   * async iterable of chunks) and a `model` property — neither is defined
   * here; TODO confirm against subclasses.
   * @param {string} systemPrompt
   * @param {Array} messages
   * @yields every chunk from sendImpl, unchanged (including "usage" chunks)
   */
  async *send(systemPrompt, messages) {
    this.usageMeter.incrementMessageCount();
    const stream = this.sendImpl(systemPrompt, messages);
    for await (const chunk of stream) {
      switch (chunk.type) {
        case "usage":
          this.usageMeter.addUsage(chunk, this.model);
          break;
      }
      yield chunk;
    }
  }

  /**
   * Drains the stream into a single aggregated response.
   * Text chunks are concatenated into `response`, reasoning chunks into
   * `reasoning`; the last "usage" chunk wins for token counts.
   * @param {string} systemPrompt
   * @param {Array} messages
   * @returns {Promise<{response: string, reasoning: string, usage: object}>}
   */
  async request(systemPrompt, messages) {
    this.usageMeter.incrementMessageCount();
    const stream = this.sendImpl(systemPrompt, messages);
    const usage = {
      inputTokens: 0,
      outputTokens: 0,
      cacheWriteTokens: 0,
      cacheReadTokens: 0,
      totalCost: 0
    };
    let resp = "";
    let reasoning = "";
    for await (const chunk of stream) {
      switch (chunk.type) {
        case "usage":
          usage.inputTokens = chunk.inputTokens ?? 0;
          usage.outputTokens = chunk.outputTokens ?? 0;
          usage.cacheWriteTokens = chunk.cacheWriteTokens ?? 0;
          usage.cacheReadTokens = chunk.cacheReadTokens ?? 0;
          // FIX: default a missing totalCost to 0 (previously left
          // undefined), consistent with the token fields above and with
          // the initializer.
          usage.totalCost = chunk.totalCost ?? 0;
          break;
        case "text":
          resp += chunk.text;
          break;
        case "reasoning":
          reasoning += chunk.text;
          break;
      }
    }
    this.usageMeter.addUsage(usage, this.model);
    return {
      response: resp,
      reasoning,
      usage
    };
  }
}
|
|
24633
24688
|
|
|
24634
24689
|
// ../../node_modules/@anthropic-ai/sdk/version.mjs
|
|
24635
24690
|
var VERSION = "0.39.0";
|
|
@@ -27984,61 +28039,6 @@ Anthropic.Models = Models2;
|
|
|
27984
28039
|
Anthropic.ModelInfosPage = ModelInfosPage;
|
|
27985
28040
|
Anthropic.Beta = Beta;
|
|
27986
28041
|
|
|
27987
|
-
// ../core/src/AiService/AiServiceBase.ts
|
|
27988
|
-
class AiServiceBase {
|
|
27989
|
-
usageMeter;
|
|
27990
|
-
constructor(usageMeter) {
|
|
27991
|
-
this.usageMeter = usageMeter;
|
|
27992
|
-
}
|
|
27993
|
-
async* send(systemPrompt, messages) {
|
|
27994
|
-
this.usageMeter.incrementMessageCount();
|
|
27995
|
-
const stream = this.sendImpl(systemPrompt, messages);
|
|
27996
|
-
for await (const chunk of stream) {
|
|
27997
|
-
switch (chunk.type) {
|
|
27998
|
-
case "usage":
|
|
27999
|
-
this.usageMeter.addUsage(chunk, this.model);
|
|
28000
|
-
break;
|
|
28001
|
-
}
|
|
28002
|
-
yield chunk;
|
|
28003
|
-
}
|
|
28004
|
-
}
|
|
28005
|
-
async request(systemPrompt, messages) {
|
|
28006
|
-
this.usageMeter.incrementMessageCount();
|
|
28007
|
-
const stream = this.sendImpl(systemPrompt, messages);
|
|
28008
|
-
const usage = {
|
|
28009
|
-
inputTokens: 0,
|
|
28010
|
-
outputTokens: 0,
|
|
28011
|
-
cacheWriteTokens: 0,
|
|
28012
|
-
cacheReadTokens: 0,
|
|
28013
|
-
totalCost: 0
|
|
28014
|
-
};
|
|
28015
|
-
let resp = "";
|
|
28016
|
-
let reasoning = "";
|
|
28017
|
-
for await (const chunk of stream) {
|
|
28018
|
-
switch (chunk.type) {
|
|
28019
|
-
case "usage":
|
|
28020
|
-
usage.inputTokens = chunk.inputTokens ?? 0;
|
|
28021
|
-
usage.outputTokens = chunk.outputTokens ?? 0;
|
|
28022
|
-
usage.cacheWriteTokens = chunk.cacheWriteTokens ?? 0;
|
|
28023
|
-
usage.cacheReadTokens = chunk.cacheReadTokens ?? 0;
|
|
28024
|
-
usage.totalCost = chunk.totalCost;
|
|
28025
|
-
break;
|
|
28026
|
-
case "text":
|
|
28027
|
-
resp += chunk.text;
|
|
28028
|
-
break;
|
|
28029
|
-
case "reasoning":
|
|
28030
|
-
reasoning += chunk.text;
|
|
28031
|
-
}
|
|
28032
|
-
}
|
|
28033
|
-
this.usageMeter.addUsage(usage, this.model);
|
|
28034
|
-
return {
|
|
28035
|
-
response: resp,
|
|
28036
|
-
reasoning,
|
|
28037
|
-
usage
|
|
28038
|
-
};
|
|
28039
|
-
}
|
|
28040
|
-
}
|
|
28041
|
-
|
|
28042
28042
|
// ../core/src/AiService/ModelInfo.ts
|
|
28043
28043
|
var anthropicDefaultModelId = "claude-3-7-sonnet-20250219";
|
|
28044
28044
|
var anthropicModels = {
|