@polka-codes/cli 0.4.4 → 0.4.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +3 -4
- package/dist/index.js +199 -9
- package/package.json +1 -1
package/README.md
CHANGED
@@ -77,12 +77,11 @@ rules: |
 
 ### AI Configuration
 
-
-
-Supported providers (in recommended order):
+Supported providers
 1. DeepSeek / `deepseek` (recommended)
 2. Anthropic / `anthropic` (Sonnet 3.5 recommended)
-3.
+3. OpenRouter / `openrouter`
+4. Ollama / `ollama`
 
 Configure the AI service by creating a `.env` file in your project root:
 
package/dist/index.js
CHANGED
@@ -24629,7 +24629,7 @@ var {
   Help
 } = import__.default;
 // package.json
-var version = "0.4.4";
+var version = "0.4.5";
 
 // ../../node_modules/@anthropic-ai/sdk/version.mjs
 var VERSION = "0.36.2";
@@ -32890,17 +32890,169 @@ class OllamaService extends AiServiceBase {
   }
 }
 
+// ../core/src/AiService/OpenRouterService.ts
+class OpenRouterService extends AiServiceBase {
+  #client;
+  #apiKey;
+  model;
+  constructor(options) {
+    super();
+    if (!options.model) {
+      throw new Error("OpenRouter requires a model");
+    }
+    if (!options.apiKey) {
+      throw new Error("OpenRouter requires an API key");
+    }
+    this.#apiKey = options.apiKey;
+    this.#client = new openai_default({
+      baseURL: "https://openrouter.ai/api/v1",
+      apiKey: options.apiKey,
+      defaultHeaders: {
+        "HTTP-Referer": "https://polka.codes",
+        "X-Title": "Polka Codes"
+      }
+    });
+    this.model = {
+      id: options.model,
+      info: {}
+    };
+  }
+  async* send(systemPrompt, messages) {
+    const openAiMessages = [
+      { role: "system", content: systemPrompt },
+      ...convertToOpenAiMessages(messages)
+    ];
+    switch (this.model.id) {
+      case "anthropic/claude-3.5-sonnet":
+      case "anthropic/claude-3.5-sonnet:beta":
+      case "anthropic/claude-3.5-sonnet-20240620":
+      case "anthropic/claude-3.5-sonnet-20240620:beta":
+      case "anthropic/claude-3-5-haiku":
+      case "anthropic/claude-3-5-haiku:beta":
+      case "anthropic/claude-3-5-haiku-20241022":
+      case "anthropic/claude-3-5-haiku-20241022:beta":
+      case "anthropic/claude-3-haiku":
+      case "anthropic/claude-3-haiku:beta":
+      case "anthropic/claude-3-opus":
+      case "anthropic/claude-3-opus:beta": {
+        openAiMessages[0] = {
+          role: "system",
+          content: [
+            {
+              type: "text",
+              text: systemPrompt,
+              cache_control: { type: "ephemeral" }
+            }
+          ]
+        };
+        const lastTwoUserMessages = openAiMessages.filter((msg) => msg.role === "user").slice(-2);
+        for (const msg of lastTwoUserMessages) {
+          if (typeof msg.content === "string") {
+            msg.content = [{ type: "text", text: msg.content }];
+          }
+          if (Array.isArray(msg.content)) {
+            let lastTextPart = msg.content.filter((part) => part.type === "text").pop();
+            if (!lastTextPart) {
+              lastTextPart = { type: "text", text: "..." };
+              msg.content.push(lastTextPart);
+            }
+            lastTextPart.cache_control = { type: "ephemeral" };
+          }
+        }
+        break;
+      }
+      default:
+        break;
+    }
+    let maxTokens;
+    switch (this.model.id) {
+      case "anthropic/claude-3.5-sonnet":
+      case "anthropic/claude-3.5-sonnet:beta":
+      case "anthropic/claude-3.5-sonnet-20240620":
+      case "anthropic/claude-3.5-sonnet-20240620:beta":
+      case "anthropic/claude-3-5-haiku":
+      case "anthropic/claude-3-5-haiku:beta":
+      case "anthropic/claude-3-5-haiku-20241022":
+      case "anthropic/claude-3-5-haiku-20241022:beta":
+        maxTokens = 8192;
+        break;
+    }
+    let shouldApplyMiddleOutTransform = !this.model.info.supportsPromptCache;
+    if (this.model.id === "deepseek/deepseek-chat") {
+      shouldApplyMiddleOutTransform = true;
+    }
+    const stream = await this.#client.chat.completions.create({
+      model: this.model.id,
+      max_completion_tokens: maxTokens,
+      messages: openAiMessages,
+      temperature: 0,
+      stream: true,
+      transforms: shouldApplyMiddleOutTransform ? ["middle-out"] : undefined,
+      include_reasoning: true
+    });
+    let genId;
+    for await (const chunk of stream) {
+      if ("error" in chunk) {
+        const error = chunk.error;
+        console.error(`OpenRouter API Error: ${error?.code} - ${error?.message}`);
+        throw new Error(`OpenRouter API Error ${error?.code}: ${error?.message}`);
+      }
+      if (!genId && chunk.id) {
+        genId = chunk.id;
+      }
+      const delta = chunk.choices[0]?.delta;
+      if (delta?.reasoning) {
+        yield {
+          type: "reasoning",
+          text: delta.reasoning
+        };
+      }
+      if (delta?.content) {
+        yield {
+          type: "text",
+          text: delta.content
+        };
+      }
+    }
+    await new Promise((resolve) => setTimeout(resolve, 1000));
+    const controller = new AbortController;
+    const timeout = setTimeout(() => controller.abort(), 5000);
+    try {
+      const response = await fetch(`https://openrouter.ai/api/v1/generation?id=${genId}`, {
+        headers: {
+          Authorization: `Bearer ${this.#apiKey}`
+        },
+        signal: controller.signal
+      });
+      const responseBody = await response.json();
+      const generation = responseBody.data;
+      yield {
+        type: "usage",
+        inputTokens: generation?.native_tokens_prompt || 0,
+        outputTokens: generation?.native_tokens_completion || 0,
+        totalCost: generation?.total_cost || 0
+      };
+    } catch (error) {
+      console.error("Error fetching OpenRouter generation details:", error);
+    } finally {
+      clearTimeout(timeout);
+    }
+  }
+}
+
 // ../core/src/AiService/index.ts
 var AiServiceProvider;
 ((AiServiceProvider2) => {
   AiServiceProvider2["Anthropic"] = "anthropic";
   AiServiceProvider2["Ollama"] = "ollama";
   AiServiceProvider2["DeepSeek"] = "deepseek";
+  AiServiceProvider2["OpenRouter"] = "openrouter";
 })(AiServiceProvider ||= {});
 var defaultModels = {
   ["anthropic" /* Anthropic */]: "claude-3-5-sonnet-20241022",
   ["ollama" /* Ollama */]: "maryasov/qwen2.5-coder-cline:7b",
-  ["deepseek" /* DeepSeek */]: "deepseek-chat"
+  ["deepseek" /* DeepSeek */]: "deepseek-chat",
+  ["openrouter" /* OpenRouter */]: "anthropic/claude-3.5-sonnet"
 };
 var createService = (provider, options) => {
   switch (provider) {
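The block above adds a full OpenRouter provider: the constructor requires both a model and an API key, and `send` streams `reasoning` and `text` chunks before yielding a final `usage` chunk fetched from OpenRouter's generation endpoint. A minimal consumer sketch, assuming the `createService` factory and chunk shapes shown in this diff (the environment variable name and message content are illustrative only, not package API):

    // Sketch, not part of the package: wiring inferred from the bundled code above.
    const service = createService("openrouter" /* OpenRouter */, {
      apiKey: process.env.OPENROUTER_API_KEY ?? "",   // hypothetical env var name
      model: "anthropic/claude-3.5-sonnet",           // matches the new defaultModels entry
    });
    const messages = [{ role: "user", content: "Summarize this repository." }];  // assumed message shape
    for await (const chunk of service.send("You are a coding assistant.", messages)) {
      if (chunk.type === "reasoning") process.stderr.write(chunk.text);
      if (chunk.type === "text") process.stdout.write(chunk.text);
      if (chunk.type === "usage") console.log(`cost: $${chunk.totalCost}`);
    }

Note that for the listed Anthropic models the service rewrites the system prompt and the last two user messages to carry `cache_control: { type: "ephemeral" }`, enabling prompt caching through OpenRouter.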
@@ -32910,6 +33062,8 @@ var createService = (provider, options) => {
       return new OllamaService(options);
     case "deepseek" /* DeepSeek */:
       return new DeepSeekService(options);
+    case "openrouter" /* OpenRouter */:
+      return new OpenRouterService(options);
   }
 };
 // ../core/src/tool.ts
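Because `OpenRouterService` throws when no model is supplied, callers are expected to resolve one before calling the factory, typically from the `defaultModels` table added above. A small sketch of that lookup, using only names that appear in the bundled code (the API key value is a placeholder):

    const provider = "openrouter" /* AiServiceProvider.OpenRouter */;
    const model = defaultModels[provider];                 // "anthropic/claude-3.5-sonnet"
    const service = createService(provider, { apiKey: "<openrouter-api-key>", model });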
@@ -33191,6 +33345,9 @@ ${agents}`;
     callback = () => {
     }
   }) {
+    if (maxIterations < 1) {
+      throw new Error("Max iterations must be greater than 0");
+    }
     const taskInfo = {
       options: {
         maxIterations
@@ -33430,6 +33587,9 @@ var getStringArray = (args, name, defaultValue) => {
     }
     return defaultValue;
   }
+  if (ret === "") {
+    return [];
+  }
   return ret.split(",");
 };
 var getBoolean = (args, name, defaultValue) => {
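The new guard changes how an empty string argument is parsed: `"".split(",")` evaluates to `[""]`, a single empty entry, whereas the added check now returns an empty array. A quick before/after illustration (plain JavaScript, not package code):

    "".split(",");       // [""]       - previous behaviour: one empty element
    "a,b".split(",");     // ["a", "b"] - unchanged for non-empty input
    // with the new check, an empty flag value now yields [] instead of [""]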
@@ -34392,19 +34552,24 @@ class MultiAgent {
   get model() {
     return this.#activeAgent?.model;
   }
-  async#startTask(agentName, task, context, callback) {
+  async#startTask(agentName, task, context, maxIterations, callback) {
     this.#activeAgent = await this.#config.createAgent(agentName);
     const [exitReason, info] = await this.#activeAgent.startTask({
       task,
       context,
+      maxIterations,
       callback
     });
     if (typeof exitReason === "string") {
       return [exitReason, info];
     }
     if (exitReason.type === "HandOver") {
+      const remainIteration = maxIterations - Math.floor(info.messages.length / 2);
+      if (remainIteration < 1) {
+        return ["MaxIterations", info];
+      }
       const context2 = await this.#config.getContext(agentName, exitReason.context, exitReason.files);
-      const [exitReason2, info2] = await this.#startTask(exitReason.agentName, exitReason.task, context2, callback);
+      const [exitReason2, info2] = await this.#startTask(exitReason.agentName, exitReason.task, context2, remainIteration, callback);
       info2.inputTokens += info.inputTokens;
       info2.outputTokens += info.outputTokens;
       info2.cacheWriteTokens += info.cacheWriteTokens;
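`#startTask` now threads an iteration budget through agent hand-overs. The deduction assumes each iteration corresponds to roughly one user/assistant message pair, so the remaining budget is `maxIterations - floor(messages / 2)`. A worked example under that assumption (values are illustrative):

    const maxIterations = 50;
    const messageCount = 24;                                   // 12 request/response pairs so far
    const remainIteration = maxIterations - Math.floor(messageCount / 2);  // 50 - 12 = 38
    // remainIteration < 1 would end the task early with ["MaxIterations", info]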
@@ -34418,7 +34583,8 @@ class MultiAgent {
     if (this.#activeAgent) {
       throw new Error("An active agent already exists");
     }
-
+    const maxIterations = options.maxIterations ?? 50;
+    return this.#startTask(options.agentName, options.task, options.context, maxIterations, options.callback);
   }
   async continueTask(userMessage, taskInfo, callback = () => {
   }) {
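The public `startTask` entry point applies a default budget of 50 when `options.maxIterations` is omitted and forwards it into the private `#startTask`. A minimal call sketch, given a `MultiAgent` instance (the agent name and task text are placeholders, not package defaults):

    const [exitReason, info] = await multiAgent.startTask({
      agentName: "coder",                  // hypothetical agent name
      task: "Add an OpenRouter provider",
      context: "",
      // maxIterations omitted, so it falls back to 50
      callback: () => {},
    });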
@@ -34490,6 +34656,8 @@ ${output}`);
 
 // ../core/src/AiTool/generateGithubPullRequestDetails.ts
 var prompt2 = `
+# Generate Github Pull Request Details
+
 You are given:
 - A branch name in <tool_input_branch_name>.
 - An optional context message in <tool_input_context> (which may or may not be present).
@@ -34502,10 +34670,30 @@ Your task:
 3. Produce a single GitHub Pull Request title.
 4. Produce a Pull Request description that explains the changes.
 
+Use the following template for the Pull Request description:
+
+---
+**Context (if provided)**:
+- Acknowledge any guiding concerns or instructions.
+
+**Summary of Changes**:
+- Provide a concise list or overview of what changed.
+
+**Highlights of Changed Code**:
+- Mention only the specific sections or functionalities updated, without showing full surrounding context.
+
+**Additional Information (if needed)**:
+- Testing steps (if applicable).
+- Any notes or caveats.
+
+---
+
 Output format:
 <tool_output>
 <tool_output_pr_title>YOUR PR TITLE HERE</tool_output_pr_title>
-<tool_output_pr_description>
+<tool_output_pr_description>
+YOUR PR DESCRIPTION HERE
+</tool_output_pr_description>
 </tool_output>
 
 Below is an **example** of the input and output:
@@ -34527,7 +34715,7 @@ diff --git a/order_service.py b/order_service.py
 - if is_valid_order(order):
 - process_order(order)
 + validate_and_process(order)
-
+</tool_input_commit_diff>
 </tool_input>
 
 Example Output:
@@ -34541,8 +34729,7 @@ to use the new validate_and_process method for improved maintainability.
 
 ---
 
-Use the above format whenever you receive
-Only highlight the changed code and avoid including the context around the changes in the description.
+Use the above format whenever you receive <tool_input> that may include a branch name, an optional context, aggregated commit messages in a single tag, and a combined diff in a single tag. Provide your final output strictly in <tool_output> with <tool_output_pr_title> and <tool_output_pr_description>. Only highlight the changed code and avoid including the context around the changes in the description.
 `;
 var generateGithubPullRequestDetails_default = {
   name: "generateGithubPullRequestDetails",
@@ -41257,6 +41444,9 @@ async function configPrompt(existingConfig) {
     case "deepseek" /* DeepSeek */:
       model = deepSeekDefaultModelId;
       break;
+    case "openrouter" /* OpenRouter */:
+      model = await esm_default3({ message: "Enter Model ID (Visit https://openrouter.ai/models for available models):" });
+      break;
   }
   let apiKey;
   if (provider2 !== "ollama" /* Ollama */) {