@codeflyai/codefly 0.24.2 → 0.24.5
This diff shows the contents of publicly available package versions that have been released to a supported registry. It is provided for informational purposes only and reflects the changes between these versions as they appear in their public registry.
- package/bundle/codefly.js +562 -182
- package/package.json +1 -1
package/bundle/codefly.js (CHANGED)
@@ -92847,6 +92847,9 @@ function isAutoModel(model) {
 function supportsMultimodalFunctionResponse(model) {
 return model.startsWith("gemini-3-");
 }
+function isGeminiModel(model) {
+return model.startsWith("gemini-") || model.startsWith("google/") || /^gemini-\d/.test(model);
+}
 var PREVIEW_CODEFLY_MODEL, PREVIEW_CODEFLY_FLASH_MODEL, DEFAULT_CODEFLY_MODEL, DEFAULT_CODEFLY_FLASH_MODEL, LEGACY_GEMINI_2_5_PRO, LEGACY_GEMINI_2_5_FLASH, DEFAULT_GEMINI_FLASH_LITE_MODEL, VALID_CODEFLY_MODELS, PREVIEW_CODEFLY_MODEL_AUTO, DEFAULT_CODEFLY_MODEL_AUTO, CODEFLY_MODEL_ALIAS_AUTO, CODEFLY_MODEL_ALIAS_PRO, CODEFLY_MODEL_ALIAS_FLASH, CODEFLY_MODEL_ALIAS_FLASH_LITE, DEFAULT_CODEFLY_EMBEDDING_MODEL, DEFAULT_THINKING_MODE;
 var init_models = __esm({
 "packages/core/dist/src/config/models.js"() {
@@ -111157,11 +111160,15 @@ var init_read_package_up = __esm({
 
 // packages/core/dist/src/utils/package.js
 async function getPackageJson(cwd7) {
-
-
+try {
+const result2 = await readPackageUp({ cwd: cwd7 });
+if (!result2) {
+return void 0;
+}
+return result2.packageJson;
+} catch (_e) {
 return void 0;
 }
-return result2.packageJson;
 }
 var init_package = __esm({
 "packages/core/dist/src/utils/package.js"() {
@@ -111218,8 +111225,8 @@ var init_channel = __esm({
 import { fileURLToPath as fileURLToPath4 } from "node:url";
 import path12 from "node:path";
 async function getVersion() {
-if ("0.24.
-return "0.24.
+if ("0.24.4") {
+return "0.24.4";
 }
 const pkgJson = await getPackageJson(__dirname3);
 return pkgJson?.version || "unknown";
@@ -185928,8 +185935,8 @@ var GIT_COMMIT_INFO, CLI_VERSION;
 var init_git_commit = __esm({
 "packages/core/dist/src/generated/git-commit.js"() {
 "use strict";
-GIT_COMMIT_INFO = "
-CLI_VERSION = "0.24.
+GIT_COMMIT_INFO = "6575fcec5";
+CLI_VERSION = "0.24.4";
 }
 });
 
@@ -187383,6 +187390,10 @@ function logApiRequest(config2, event) {
 logger5.emit(event.toLogRecord(config2));
 logger5.emit(event.toSemanticLogRecord(config2));
 });
+const inputMessages = event.prompt.contents ? toInputMessages(event.prompt.contents) : [];
+coreEvents.emitConsoleLog("debug", `[API Request] Model: ${event.model}
+Prompt ID: ${event.prompt.prompt_id}
+Messages: ${JSON.stringify(inputMessages, null, 2)}`);
 }
 function logFlashFallback(config2, event) {
 ClearcutLogger.getInstance(config2)?.logFlashFallbackEvent();
@@ -187432,6 +187443,11 @@ function logApiError(config2, event) {
 }
 });
 });
+const inputMessages = event.prompt.contents ? toInputMessages(event.prompt.contents) : [];
+coreEvents.emitConsoleLog("error", `[API Error] Model: ${event.model}
+Error: ${event.error}
+Duration: ${event.duration_ms}ms
+Messages: ${JSON.stringify(inputMessages, null, 2)}`);
 }
 function logApiResponse(config2, event) {
 const uiEvent = {
@@ -187445,6 +187461,12 @@ function logApiResponse(config2, event) {
 const logger5 = import_api_logs.logs.getLogger(SERVICE_NAME);
 logger5.emit(event.toLogRecord(config2));
 logger5.emit(event.toSemanticLogRecord(config2));
+const outputMessages = toOutputMessages(event.response.candidates);
+coreEvents.emitConsoleLog("debug", `[API Response] Model: ${event.model}
+Status: ${event.status_code}
+Duration: ${event.duration_ms}ms
+Usage: ${JSON.stringify(event.usage, null, 2)}
+Messages: ${JSON.stringify(outputMessages, null, 2)}`);
 const conventionAttributes = getConventionAttributes(event);
 recordApiResponseMetrics(config2, event.duration_ms, {
 model: event.model,
@@ -187786,6 +187808,8 @@ var init_loggers = __esm({
 import_api_logs = __toESM(require_src12(), 1);
 init_constants();
 init_types6();
+init_events();
+init_semantic();
 init_metrics2();
 init_sdk();
 init_uiTelemetry();
@@ -188387,8 +188411,9 @@ var OpenAICompatibleContentGenerator;
 var init_openaiCompatibleContentGenerator = __esm({
 "packages/core/dist/src/core/openaiCompatibleContentGenerator.js"() {
 "use strict";
-init_node();
 init_converter2();
+init_events();
+init_node();
 OpenAICompatibleContentGenerator = class {
 config;
 baseUrl;
@@ -188405,26 +188430,105 @@ var init_openaiCompatibleContentGenerator = __esm({
 return apiKey;
 }
 mapContentToOpenAIMessages(contents) {
-
+const messages = [];
+for (const content of contents) {
 let role = content.role === "model" ? "assistant" : "user";
 if (role === "user" && !content.role) {
 role = "user";
 }
-const
+const parts2 = content.parts || [];
+const textParts = parts2.filter((part) => part.text).map((part) => part.text).join("");
+const toolCalls = parts2.filter((part) => part.functionCall).map((part) => ({
+id: part.functionCall.id || `call_${Math.random().toString(36).slice(2, 11)}`,
+type: "function",
+function: {
+name: part.functionCall.name || "unknown",
+arguments: JSON.stringify(part.functionCall.args)
+}
+}));
+const functionResponseParts = parts2.filter((part) => part.functionResponse);
+if (functionResponseParts.length > 0) {
+for (const part of functionResponseParts) {
+messages.push({
+role: "tool",
+content: JSON.stringify(part.functionResponse.response),
+tool_call_id: part.functionResponse.id || part.functionResponse.name
+});
+}
+} else {
+messages.push({
+role,
+content: textParts || null,
+tool_calls: toolCalls.length > 0 ? toolCalls : void 0
+});
+}
+}
+return messages;
+}
+mapToolsToOpenAITools(tools) {
+if (!tools || !Array.isArray(tools))
+return void 0;
+const openAITools = [];
+for (const tool of tools) {
+const toolObj = tool;
+if (toolObj.functionDeclarations) {
+for (const fd of toolObj.functionDeclarations) {
+openAITools.push({
+type: "function",
+function: {
+name: fd.name,
+description: fd.description,
+parameters: fd.parameters || fd.parametersJsonSchema || {}
+}
+});
+}
+}
+}
+return openAITools.length > 0 ? openAITools : void 0;
+}
+mapToolConfigToOpenAIToolChoice(toolConfig) {
+if (!toolConfig || !toolConfig.functionCallingConfig)
+return void 0;
+const mode = toolConfig.functionCallingConfig.mode;
+if (mode === "ANY")
+return "required";
+if (mode === "NONE")
+return "none";
+if (mode === "AUTO")
+return "auto";
+if (toolConfig.functionCallingConfig.allowedFunctionNames && toolConfig.functionCallingConfig.allowedFunctionNames.length > 0) {
 return {
-
-
+type: "function",
+function: {
+name: toolConfig.functionCallingConfig.allowedFunctionNames[0]
+}
 };
-}
+}
+return void 0;
 }
 async generateContent(request3, _userPromptId) {
 const contents = toContents2(request3.contents);
 const messages = this.mapContentToOpenAIMessages(contents);
+if (request3.config?.systemInstruction) {
+const systemContent = toContents2(request3.config.systemInstruction)[0];
+const systemText = (systemContent.parts || []).filter((p) => p.text).map((p) => p.text).join("");
+if (systemText) {
+messages.unshift({
+role: "system",
+content: systemText
+});
+}
+}
 const token2 = this.getAuthToken();
 const requestBody = {
-model:
-messages
-
+model: this.resolveModelName(request3.model),
+messages,
+tools: this.mapToolsToOpenAITools(request3.config?.tools),
+tool_choice: this.mapToolConfigToOpenAIToolChoice(request3.config?.toolConfig),
+temperature: request3.config?.temperature,
+top_p: request3.config?.topP,
+max_tokens: request3.config?.maxOutputTokens,
+stop: request3.config?.stopSequences
 };
 const response = await fetch(`${this.baseUrl}/chat/completions`, {
 method: "POST",
@@ -188440,12 +188544,42 @@ var init_openaiCompatibleContentGenerator = __esm({
 }
 const data = await response.json();
 const choice2 = data.choices[0];
+const parts2 = [];
+if (choice2.message.reasoning_content) {
+parts2.push({
+text: choice2.message.reasoning_content,
+thought: true
+});
+parts2.push({
+text: `<think>
+${choice2.message.reasoning_content}
+</think>
+
+`
+});
+}
+if (choice2.message.content) {
+parts2.push({ text: choice2.message.content });
+}
+if (choice2.message.tool_calls) {
+for (const tc of choice2.message.tool_calls) {
+if (tc.function?.name && tc.function?.arguments) {
+parts2.push({
+functionCall: {
+name: tc.function.name,
+args: JSON.parse(tc.function.arguments),
+id: tc.id
+}
+});
+}
+}
+}
 const genResponse = {
 candidates: [
 {
 content: {
 role: "model",
-parts:
+parts: parts2
 },
 finishReason: choice2.finish_reason,
 index: choice2.index
@@ -188462,11 +188596,30 @@ var init_openaiCompatibleContentGenerator = __esm({
 async generateContentStream(request3, _userPromptId) {
 const contents = toContents2(request3.contents);
 const messages = this.mapContentToOpenAIMessages(contents);
+if (request3.config?.systemInstruction) {
+const systemContent = toContents2(request3.config.systemInstruction)[0];
+const systemText = (systemContent.parts || []).filter((p) => p.text).map((p) => p.text).join("");
+if (systemText) {
+messages.unshift({
+role: "system",
+content: systemText
+});
+}
+}
 const token2 = this.getAuthToken();
 const requestBody = {
-model:
+model: this.resolveModelName(request3.model),
 messages,
-stream: true
+stream: true,
+tools: this.mapToolsToOpenAITools(request3.config?.tools),
+tool_choice: this.mapToolConfigToOpenAIToolChoice(request3.config?.toolConfig),
+temperature: request3.config?.temperature,
+top_p: request3.config?.topP,
+max_tokens: request3.config?.maxOutputTokens,
+stop: request3.config?.stopSequences,
+stream_options: {
+include_usage: true
+}
 };
 const response = await fetch(`${this.baseUrl}/chat/completions`, {
 method: "POST",
@@ -188487,39 +188640,171 @@ var init_openaiCompatibleContentGenerator = __esm({
 const decoder = new TextDecoder();
 return async function* () {
 let buffer = "";
+let isThinking = false;
+const toolCallsAccumulator = /* @__PURE__ */ new Map();
 while (true) {
 const { done, value } = await reader.read();
 if (done)
 break;
-
+const chunkText = decoder.decode(value, { stream: true });
+buffer += chunkText;
 const lines = buffer.split("\n");
 buffer = lines.pop() || "";
 for (const line of lines) {
-
+const trimmedLine = line.trim();
+if (trimmedLine === "" || trimmedLine === "data: [DONE]")
 continue;
-if (
-
-
+if (trimmedLine.startsWith("data: ")) {
+const jsonStr = trimmedLine.slice(6);
+coreEvents.emitConsoleLog("debug", `[OpenAI Stream] ${jsonStr}`);
 try {
-const data = JSON.parse(
+const data = JSON.parse(jsonStr);
+if (data.usage) {
+const genResponse = {
+candidates: [],
+usageMetadata: {
+promptTokenCount: data.usage.prompt_tokens,
+candidatesTokenCount: data.usage.completion_tokens,
+totalTokenCount: data.usage.total_tokens
+}
+};
+yield Object.setPrototypeOf(genResponse, GenerateContentResponse.prototype);
+}
 const choice2 = data.choices[0];
-
-
+if (!choice2)
+continue;
+const delta = choice2.delta;
+const finishReason = choice2.finish_reason;
+if (delta.reasoning_content) {
+const parts2 = [];
+if (!isThinking) {
+isThinking = true;
+parts2.push({ text: "<think>\n" });
+}
+parts2.push({ text: delta.reasoning_content });
+parts2.push({
+text: delta.reasoning_content,
+thought: true
+});
 const genResponse = {
 candidates: [
 {
 content: {
 role: "model",
-parts:
+parts: parts2
 },
-
+finish_reason: finishReason ? finishReason : void 0,
 index: choice2.index
 }
 ]
 };
 yield Object.setPrototypeOf(genResponse, GenerateContentResponse.prototype);
 }
-
+if (delta.content) {
+const parts2 = [];
+if (isThinking) {
+isThinking = false;
+parts2.push({ text: "\n</think>\n\n" });
+}
+const content = delta.content;
+const toolCallRegex = /<tool_call>(.*?)<arg_key>(.*?)<\/arg_key><arg_value>(.*?)<\/arg_value><\/tool_call>/gs;
+let match2;
+let lastIndex = 0;
+let foundToolCall = false;
+while ((match2 = toolCallRegex.exec(content)) !== null) {
+foundToolCall = true;
+if (match2.index > lastIndex) {
+parts2.push({ text: content.slice(lastIndex, match2.index) });
+}
+const toolName = match2[1].trim();
+const key = match2[2].trim();
+const value2 = match2[3].trim();
+parts2.push({
+functionCall: {
+name: toolName,
+args: { [key]: value2 },
+id: `xml_${Math.random().toString(36).slice(2, 11)}`
+}
+});
+lastIndex = toolCallRegex.lastIndex;
+}
+if (foundToolCall) {
+if (lastIndex < content.length) {
+parts2.push({ text: content.slice(lastIndex) });
+}
+} else {
+parts2.push({ text: content });
+}
+const genResponse = {
+candidates: [
+{
+content: {
+role: "model",
+parts: parts2
+},
+finishReason: finishReason ? finishReason : void 0,
+index: choice2.index
+}
+]
+};
+yield Object.setPrototypeOf(genResponse, GenerateContentResponse.prototype);
+}
+if (delta.tool_calls) {
+for (const tc of delta.tool_calls) {
+const index = tc.index ?? 0;
+if (!toolCallsAccumulator.has(index)) {
+toolCallsAccumulator.set(index, { arguments: "" });
+}
+const acc = toolCallsAccumulator.get(index);
+if (tc.id)
+acc.id = tc.id;
+if (tc.function?.name)
+acc.name = tc.function.name;
+if (tc.function?.arguments) {
+acc.arguments += tc.function.arguments;
+}
+}
+}
+if (finishReason === "tool_calls" || finishReason === "stop") {
+if (toolCallsAccumulator.size > 0) {
+const parts2 = [];
+for (const acc of toolCallsAccumulator.values()) {
+if (acc.name && acc.arguments) {
+try {
+const args2 = JSON.parse(acc.arguments);
+parts2.push({
+functionCall: {
+name: acc.name,
+args: args2,
+id: acc.id
+}
+});
+coreEvents.emitConsoleLog("debug", `[OpenAI Tool Call] Yielding: ${acc.name}(${acc.arguments})`);
+} catch (_e) {
+coreEvents.emitConsoleLog("error", `[OpenAI Tool Call] Failed to parse arguments for ${acc.name}: ${acc.arguments}`);
+}
+}
+}
+if (parts2.length > 0) {
+const genResponse = {
+candidates: [
+{
+content: {
+role: "model",
+parts: parts2
+},
+finishReason,
+index: choice2.index
+}
+]
+};
+yield Object.setPrototypeOf(genResponse, GenerateContentResponse.prototype);
+}
+toolCallsAccumulator.clear();
+}
+}
+} catch (_e) {
+coreEvents.emitConsoleLog("error", `[OpenAI Stream] Failed to parse JSON: ${line}`);
 }
 }
 }
@@ -188535,6 +188820,20 @@ var init_openaiCompatibleContentGenerator = __esm({
 async embedContent(_request) {
 throw new Error("embedContent not implemented for OpenAI compatible provider yet.");
 }
+/**
+* Resolves the model name to use for the request.
+* If a Gemini model name is requested (usually from an internal alias like 'classifier'),
+* but the provider is OpenAI-compatible, we redirect it to the configured OpenAI model
+* unless the configured OpenAI model itself looks like a Gemini model (which might happen
+* if the user is using a proxy specifically for Gemini).
+*/
+resolveModelName(requestedModel) {
+const model = requestedModel || this.config.model;
+if (model.startsWith("gemini-") && !this.config.model.startsWith("gemini-")) {
+return this.config.model;
+}
+return model;
+}
 };
 }
 });
@@ -188570,7 +188869,7 @@ async function createContentGeneratorConfig(config2, authType) {
 if (apiKey) {
 contentGeneratorConfig.apiKey = apiKey;
 contentGeneratorConfig.baseUrl = config2.openaiConfig?.baseUrl || openaiBaseUrl || "https://api.openai.com/v1";
-contentGeneratorConfig.model = openaiModel || config2.openaiConfig?.model ||
+contentGeneratorConfig.model = openaiModel || config2.openaiConfig?.model || "gpt-4o";
 return contentGeneratorConfig;
 }
 }
@@ -224054,7 +224353,13 @@ async function getProcessTableWindows() {
 async function getProcessInfo(pid) {
 try {
 const command2 = `ps -o ppid=,command= -p ${pid}`;
-const
+const timeoutPromise = new Promise((_2, reject) => {
+setTimeout(() => reject(new Error("Process info lookup timed out")), 2e3);
+});
+const { stdout } = await Promise.race([
+execAsync(command2),
+timeoutPromise
+]);
 const trimmedStdout = stdout.trim();
 if (!trimmedStdout) {
 return { parentPid: 0, name: "", command: "" };
@@ -290425,7 +290730,8 @@ var init_retry = __esm({
 "EPIPE",
 "ENOTFOUND",
 "EAI_AGAIN",
-"ECONNREFUSED"
+"ECONNREFUSED",
+"ERR_STREAM_PREMATURE_CLOSE"
 ];
 FETCH_FAILED_MESSAGE = "fetch failed";
 }
@@ -319351,7 +319657,11 @@ var init_nextSpeakerChecker = __esm({
 });
 
 // packages/core/dist/src/core/tokenLimits.js
-function tokenLimit(model) {
+function tokenLimit(model, config2) {
+const userLimit = config2?.getOpenaiContextWindowLimit();
+if (userLimit && userLimit > 0) {
+return userLimit;
+}
 switch (model) {
 case PREVIEW_CODEFLY_MODEL:
 case PREVIEW_CODEFLY_FLASH_MODEL:
@@ -319360,6 +319670,9 @@ function tokenLimit(model) {
 case DEFAULT_GEMINI_FLASH_LITE_MODEL:
 return 1048576;
 default:
+if (model && model.toLowerCase().includes("glm-4")) {
+return 1048576;
+}
 return DEFAULT_TOKEN_LIMIT;
 }
 }
@@ -320778,7 +321091,7 @@ var init_client2 = __esm({
 if (compressed.compressionStatus === CompressionStatus.COMPRESSED) {
 yield { type: CodeflyEventType.ChatCompressed, value: compressed };
 }
-const remainingTokenCount = tokenLimit(modelForLimitCheck) - this.getChat().getLastPromptTokenCount();
+const remainingTokenCount = tokenLimit(modelForLimitCheck, this.config) - this.getChat().getLastPromptTokenCount();
 const estimatedRequestTokenCount = await calculateRequestTokenCount(request3, this.getContentGeneratorOrFail(), modelForLimitCheck);
 if (estimatedRequestTokenCount > remainingTokenCount) {
 yield {
@@ -330655,21 +330968,21 @@ var init_cli_help_agent = __esm({
 init_zod();
 init_get_internal_docs();
 CliHelpReportSchema = external_exports.object({
-answer: external_exports.string().describe("The detailed answer to the user question about
+answer: external_exports.string().describe("The detailed answer to the user question about Codefly CLI."),
 sources: external_exports.array(external_exports.string()).describe("The documentation files used to answer the question.")
 });
 CliHelpAgent = (config2) => ({
 name: "cli_help",
 kind: "local",
 displayName: "CLI Help Agent",
-description: "Specialized in answering questions about how users use you, (
+description: "Specialized in answering questions about how users use you, (Codefly CLI): features, documentation, and current runtime configuration.",
 inputConfig: {
 inputSchema: {
 type: "object",
 properties: {
 question: {
 type: "string",
-description: "The specific question about
+description: "The specific question about Codefly CLI."
 }
 },
 required: ["question"]
@@ -330700,8 +331013,8 @@ var init_cli_help_agent = __esm({
 tools: [new GetInternalDocsTool(config2.getMessageBus())]
 },
 promptConfig: {
-query: "Your task is to answer the following question about
-systemPrompt: "You are **CLI Help Agent**, an expert on
+query: "Your task is to answer the following question about Codefly CLI:\n<question>\n${question}\n</question>",
+systemPrompt: "You are **CLI Help Agent**, an expert on Codefly CLI. Your purpose is to provide accurate information about Codefly CLI's features, configuration, and current state.\n\n### Runtime Context\n- **CLI Version:** ${cliVersion}\n- **Active Model:** ${activeModel}\n- **Today's Date:** ${today}\n\n" + (config2.isAgentsEnabled() ? '### Sub-Agents (Local & Remote)\nUser defined sub-agents are defined in `.codefly/agents/` or `~/.codefly/agents/` as .md files. **CRITICAL:** These files **MUST** start with YAML frontmatter enclosed in triple-dashes `---`, for example:\n\n```yaml\n---\nname: my-agent\n---\n```\n\nWithout this mandatory frontmatter, the agent will not be discovered or loaded by Codefly CLI. The Markdown body following the frontmatter becomes the agent\'s system prompt (`system_prompt`). Always reference the types and properties outlined here directly when answering questions about sub-agents.\n- **Local Agent:** `kind = "local"`, `name`, `description`, `system_prompt`, and optional `tools`, `model`, `temperate`, `max_turns`, `timeout_mins`.\n- **Remote Agent (A2A):** `kind = "remote"`, `name`, `agent_card_url`. Remote Agents do not use `system_prompt`. Multiple remote agents can be defined by using a YAML array at the top level of the frontmatter. **Note:** When users ask about "remote agents", they are referring to this Agent2Agent functionality, which is completely distinct from MCP servers.\n- **Agent Names:** Must be valid slugs (lowercase letters, numbers, hyphens, and underscores only).\n- **User Commands:** The user can manage agents using `/agents list` to see all available agents and `/agents refresh` to reload the registry after modifying definition files. You (the agent) cannot run these commands.\n\n' : "") + "### Instructions\n1. **Explore Documentation**: Use the `get_internal_docs` tool to find answers. If you don't know where to start, call `get_internal_docs()` without arguments to see the full list of available documentation files.\n2. **Be Precise**: Use the provided runtime context and documentation to give exact answers.\n3. **Cite Sources**: Always include the specific documentation files you used in your final report.\n4. **Non-Interactive**: You operate in a loop and cannot ask the user for more info. If the question is ambiguous, answer as best as you can with the information available.\n\n" + (config2.language && config2.language !== "auto" ? `CRITICAL: You MUST respond in ${config2.language}.
 ` : "") + "You MUST call `complete_task` with a JSON report containing your `answer` and the `sources` you used."
 }
 });
@@ -340973,7 +341286,7 @@ var init_config3 = __esm({
 this.setHasAccessToPreviewModel(true);
 }
 if (!this.hasAccessToPreviewModel && isPreviewModel(this.model)) {
-this.setModel(DEFAULT_CODEFLY_MODEL_AUTO);
+await this.setModel(DEFAULT_CODEFLY_MODEL_AUTO);
 }
 await this.ensureExperimentsLoaded();
 const adminControlsEnabled = this.experiments?.flags[ExperimentFlags.ENABLE_ADMIN_CONTROLS]?.boolValue ?? false;
@@ -341051,12 +341364,21 @@ var init_config3 = __esm({
 return this.contentGeneratorConfig;
 }
 getModel() {
-return this.
+return this._activeModel;
 }
-setModel(newModel, isTemporary = true) {
+async setModel(newModel, isTemporary = true) {
 if (this.model !== newModel || this._activeModel !== newModel) {
-
+if (!isTemporary) {
+this.model = newModel;
+}
 this._activeModel = newModel;
+const currentAuthType = this.contentGeneratorConfig?.authType;
+const isNewModelGemini = isGeminiModel(newModel) || isAutoModel(newModel);
+if (isNewModelGemini && currentAuthType === AuthType2.OPENAI && !isGeminiModel(this.openaiConfig?.model || "")) {
+await this.refreshAuth(AuthType2.LOGIN_WITH_GOOGLE);
+} else if (!isNewModelGemini && currentAuthType !== AuthType2.OPENAI && this.openaiConfig?.models?.split(",").map((m2) => m2.trim()).includes(newModel)) {
+await this.refreshAuth(AuthType2.OPENAI);
+}
 coreEvents.emitModelChanged(newModel);
 if (this.onModelChange && !isTemporary) {
 this.onModelChange(newModel);
@@ -341065,7 +341387,7 @@ var init_config3 = __esm({
 this.modelAvailabilityService.reset();
 }
 activateFallbackMode(model) {
-this.setModel(model, true);
+void this.setModel(model, true);
 const authType = this.getContentGeneratorConfig()?.authType;
 if (authType) {
 logFlashFallback(this, new FlashFallbackEvent(authType));
@@ -341154,9 +341476,9 @@ var init_config3 = __esm({
 this.previewFeatures = previewFeatures;
 const currentModel = this.getModel();
 if (!previewFeatures && isPreviewModel(currentModel)) {
-this.setModel(DEFAULT_CODEFLY_MODEL_AUTO);
+void this.setModel(DEFAULT_CODEFLY_MODEL_AUTO);
 } else if (previewFeatures && currentModel === DEFAULT_CODEFLY_MODEL_AUTO) {
-this.setModel(PREVIEW_CODEFLY_MODEL_AUTO);
+void this.setModel(PREVIEW_CODEFLY_MODEL_AUTO);
 }
 }
 getHasAccessToPreviewModel() {
@@ -341285,6 +341607,9 @@ var init_config3 = __esm({
 getContextManager() {
 return this.contextManager;
 }
+getOpenaiContextWindowLimit() {
+return this.openaiConfig?.contextWindowLimit;
+}
 isJitContextEnabled() {
 return this.experimentalJitContext;
 }
@@ -351163,6 +351488,7 @@ __export(dist_exports, {
 isEditorAvailable: () => isEditorAvailable,
 isEnabled: () => isEnabled,
 isFatalToolError: () => isFatalToolError,
+isGeminiModel: () => isGeminiModel,
 isGitRepository: () => isGitRepository,
 isGuiEditor: () => isGuiEditor,
 isInvalidArgumentError: () => isInvalidArgumentError,
@@ -418326,7 +418652,7 @@ var init_init3 = __esm({
 init_command_generation();
 init_legacy_cleanup();
 init_shared2();
-OPENSPEC_VERSION2 = "0.24.
+OPENSPEC_VERSION2 = "0.24.4";
 DEFAULT_SCHEMA4 = "spec-driven";
 PROGRESS_SPINNER = {
 interval: 80,
@@ -430826,13 +431152,14 @@ var isAtCommand = (query) => (
 query.startsWith("@") || /\s@/.test(query)
 );
 var isSlashCommand = (query) => {
-
+const trimmedQuery = query.trimStart();
+if (!trimmedQuery.startsWith("/")) {
 return false;
 }
-if (
+if (trimmedQuery.startsWith("//")) {
 return false;
 }
-if (
+if (trimmedQuery.startsWith("/*")) {
 return false;
 }
 return true;
@@ -449168,7 +449495,7 @@ var SETTINGS_SCHEMA = {
 category: "UI",
 requiresRestart: false,
 default: false,
-description: "Show
+description: "Show Codefly CLI model thoughts in the terminal window title during the working phase",
 showInDialog: true
 },
 dynamicWindowTitle: {
@@ -449186,7 +449513,7 @@ var SETTINGS_SCHEMA = {
 category: "UI",
 requiresRestart: true,
 default: true,
-description: "Show a warning when running
+description: "Show a warning when running Codefly CLI in the home directory.",
 showInDialog: true
 },
 hideTips: {
@@ -450114,6 +450441,15 @@ var SETTINGS_SCHEMA = {
 default: void 0,
 description: "OpenAI Compatible model names (comma-separated).",
 showInDialog: true
+},
+contextWindowLimit: {
+type: "number",
+label: "Context Window Limit",
+category: "Security",
+requiresRestart: true,
+default: void 0,
+description: "The maximum number of tokens allowed in the context window for OpenAI-compatible models.",
+showInDialog: true
 }
 }
 },
@@ -450694,7 +451030,7 @@ var SETTINGS_SCHEMA_DEFINITIONS = {
 },
 extension: {
 type: "object",
-description: "Metadata describing the
+description: "Metadata describing the Codefly CLI extension that owns this MCP server.",
 additionalProperties: { type: ["string", "boolean", "number"] }
 },
 oauth: {
@@ -450723,7 +451059,7 @@ var SETTINGS_SCHEMA_DEFINITIONS = {
 },
 TelemetrySettings: {
 type: "object",
-description: "Telemetry configuration for
+description: "Telemetry configuration for Codefly CLI.",
 additionalProperties: false,
 properties: {
 enabled: {
@@ -450816,7 +451152,7 @@ var SETTINGS_SCHEMA_DEFINITIONS = {
 },
 CustomTheme: {
 type: "object",
-description: "Custom theme definition used for styling
+description: "Custom theme definition used for styling Codefly CLI output. Colors are provided as hex strings or named ANSI colors.",
 additionalProperties: false,
 properties: {
 type: {
@@ -453134,7 +453470,7 @@ var WarningMessage = ({ text }) => {
 };
 
 // packages/cli/src/generated/git-commit.ts
-var GIT_COMMIT_INFO2 = "
+var GIT_COMMIT_INFO2 = "6575fcec5";
 
 // packages/cli/src/ui/components/AboutBox.tsx
 var import_jsx_runtime34 = __toESM(require_jsx_runtime(), 1);
@@ -460956,7 +461292,7 @@ function SettingsDialog({
 settings.workspace.path,
 showRestartPrompt
 ]);
-const footerContent = showRestartPrompt ? /* @__PURE__ */ (0, import_jsx_runtime71.jsx)(Text, { color: theme.status.warning, children: "To see changes,
+const footerContent = showRestartPrompt ? /* @__PURE__ */ (0, import_jsx_runtime71.jsx)(Text, { color: theme.status.warning, children: "To see changes, Codefly CLI must be restarted. Press r to exit and apply changes now." }) : null;
 return /* @__PURE__ */ (0, import_jsx_runtime71.jsx)(
 BaseSettingsDialog,
 {
@@ -461243,18 +461579,13 @@ function AuthDialog({
 if (defaultAuthTypeEnv && Object.values(AuthType2).includes(defaultAuthTypeEnv)) {
 defaultAuthType = defaultAuthTypeEnv;
 }
-
-
-
-
-
-
-
-if (process.env["GEMINI_API_KEY"]) {
-return item.value === AuthType2.USE_GEMINI;
-}
-return item.value === AuthType2.LOGIN_WITH_GOOGLE;
-});
+const preferredAuthType = settings.merged.security.auth.selectedType || defaultAuthType || (process.env["GEMINI_API_KEY"] ? AuthType2.USE_GEMINI : null) || (process.env["OPENAI_API_KEY"] ? AuthType2.OPENAI : null) || (process.env["GOOGLE_CLOUD_PROJECT"] && process.env["GOOGLE_CLOUD_LOCATION"] ? AuthType2.USE_VERTEX_AI : null) || AuthType2.LOGIN_WITH_GOOGLE;
+let initialAuthIndex = items.findIndex(
+(item) => item.value === preferredAuthType
+);
+if (initialAuthIndex === -1) {
+initialAuthIndex = 0;
+}
 if (settings.merged.security.auth.enforcedType) {
 initialAuthIndex = 0;
 }
@@ -462779,7 +463110,7 @@ function ModelDialog({ onClose }) {
 {
 value: DEFAULT_CODEFLY_MODEL_AUTO,
 title: getDisplayString(DEFAULT_CODEFLY_MODEL_AUTO),
-description: "Let
+description: "Let Codefly CLI decide the best model for the task: gemini-2.5-pro, gemini-2.5-flash",
 key: DEFAULT_CODEFLY_MODEL_AUTO
 },
 {
@@ -462805,7 +463136,7 @@ function ModelDialog({ onClose }) {
 list2.unshift({
 value: PREVIEW_CODEFLY_MODEL_AUTO,
 title: getDisplayString(PREVIEW_CODEFLY_MODEL_AUTO),
-description: "Let
+description: "Let Codefly CLI decide the best model for the task: gemini-3-pro, gemini-3-flash",
 key: PREVIEW_CODEFLY_MODEL_AUTO
 });
 }
@@ -462890,7 +463221,7 @@ function ModelDialog({ onClose }) {
 return;
 }
 if (config2) {
-config2.setModel(model, persistMode ? false : true);
+void config2.setModel(model, persistMode ? false : true);
 const event = new ModelSlashCommandEvent(model);
 logModelSlashCommand(config2, event);
 }
@@ -463607,7 +463938,7 @@ var INFORMATIVE_TIPS = [
 "Enable AI-powered prompt completion while typing (/settings)\u2026",
 "Enable debug logging of keystrokes to the console (/settings)\u2026",
 "Enable automatic session cleanup of old conversations (/settings)\u2026",
-"Show
+"Show Codefly CLI status in the terminal window title (/settings)\u2026",
 "Use the entire width of the terminal for output (/settings)\u2026",
 "Enable screen reader mode for better accessibility (/settings)\u2026",
 "Skip the next speaker check for faster responses (/settings)\u2026",
@@ -465050,7 +465381,8 @@ function useCommandParser(query, slashCommands) {
 isArgumentCompletion: false
 };
 }
-const
+const trimmedQuery = query.trimStart();
+const fullPath = trimmedQuery.substring(1) || "";
 const hasTrailingSpace = !!query.endsWith(" ");
 const rawParts = fullPath.split(/\s+/).filter((p) => p);
 let commandPathParts = rawParts;
@@ -465802,7 +466134,7 @@ init_dist7();
 // packages/cli/src/ui/utils/highlight.ts
 init_mnemonist();
 var HIGHLIGHT_REGEX = new RegExp(
-`(
+`((?:^|\\s)/[a-zA-Z0-9_-]*|@(?:\\\\ |[^,\\s;!?()\\[\\]{}])+|${PASTED_TEXT_PLACEHOLDER_REGEX.source})`,
 "g"
 );
 var highlightCache = new import_lru_cache.default(
@@ -465837,12 +466169,20 @@ function parseInputForHighlighting(text, index, transformations = [], cursorCol)
 if (matchIndex > last2) {
 tokens2.push({ text: text2.slice(last2, matchIndex), type: "default" });
 }
-const
-
-
-
-
+const trimmed2 = fullMatch.trimStart();
+let type2 = "paste";
+if (trimmed2.startsWith("/")) {
+type2 = "command";
+} else if (trimmed2.startsWith("@")) {
+type2 = "file";
+}
+if (type2 === "command") {
+const prefix = text2.slice(0, match2.index);
+if (index !== 0 || prefix.trim().length > 0) {
+type2 = "default";
+}
 }
+tokens2.push({ text: fullMatch, type: type2 });
 last2 = matchIndex + fullMatch.length;
 }
 if (last2 < text2.length) {
@@ -465868,7 +466208,16 @@ function parseInputForHighlighting(text, index, transformations = [], cursorCol)
 const textAfterFinalTransformation = cpSlice(text, column);
 tokens.push(...parseUntransformedInput(textAfterFinalTransformation));
 highlightCache.set(cacheKey, tokens);
-
+const mergedTokens = [];
+for (const token2 of tokens) {
+const last2 = mergedTokens[mergedTokens.length - 1];
+if (last2 && last2.type === token2.type) {
+last2.text += token2.text;
+} else {
+mergedTokens.push(token2);
+}
+}
+return mergedTokens;
 }
 function parseSegmentsFromTokens(tokens, sliceStart, sliceEnd) {
 if (sliceStart >= sliceEnd) return [];
@@ -466657,7 +467006,7 @@ var InputPrompt = ({
 width: suggestionsWidth,
 scrollOffset: activeCompletion.visibleStartIndex,
 userInput: buffer.text,
-mode: completion3.completionMode === "AT" /* AT */ ? "reverse" : buffer.text
+mode: completion3.completionMode === "AT" /* AT */ ? "reverse" : isSlashCommand(buffer.text) && !reverseSearchActive && !commandSearchActive ? "slash" : "reverse",
 expandedIndex: expandedSuggestionIndex
 }
 ) }) : null;
@@ -468251,13 +468600,16 @@ var CommandService = class _CommandService {
 loaders.map((loader2) => loader2.loadCommands(signal))
 );
 const allCommands = [];
-
+results.forEach((result2, index) => {
 if (result2.status === "fulfilled") {
 allCommands.push(...result2.value);
 } else {
-debugLogger.
+debugLogger.error(
+`Command loader ${loaders[index].constructor.name} failed:`,
+result2.reason
+);
 }
-}
+});
 const commandMap = /* @__PURE__ */ new Map();
 for (const cmd of allCommands) {
 let finalName = cmd.name;
@@ -476760,7 +477112,7 @@ async function copyExtension(source2, destination) {
 }
 function getContextFileNames(config2) {
 if (!config2.contextFileName) {
-return ["CODEFLY.md"
+return ["CODEFLY.md"];
 } else if (!Array.isArray(config2.contextFileName)) {
 return [config2.contextFileName];
 }
@@ -480659,7 +481011,7 @@ function enableCompletion(context2, partialArg) {
 }
 var skillsCommand = {
 name: "skills",
-description: "List, enable, disable, or reload
+description: "List, enable, disable, or reload Codefly CLI agent skills. Usage: /skills [list | disable <name> | enable <name> | reload]",
 kind: "built-in" /* BUILT_IN */,
 autoExecute: false,
 subCommands: [
@@ -481040,90 +481392,110 @@ var BuiltinCommandLoader = class {
 */
 async loadCommands(_signal) {
 const handle2 = startupProfiler.start("load_builtin_commands");
-
-
-
-
-
-
-
-
-
-
-
-
-
-corgiCommand,
-docsCommand,
-directoryCommand,
-editorCommand,
-...this.config?.getExtensionsEnabled() === false ? [
-{
-name: "extensions",
-description: "Manage extensions",
-kind: "built-in" /* BUILT_IN */,
-autoExecute: false,
-subCommands: [],
-action: async (_context) => ({
-type: "message",
-messageType: "error",
-content: "Extensions are disabled by your admin."
-})
-}
-] : [extensionsCommand(this.config?.getEnableExtensionReloading())],
-helpCommand,
-...this.config?.getEnableHooksUI() ? [hooksCommand] : [],
-rewindCommand,
-await ideCommand(),
-initCommand,
-...this.config?.getMcpEnabled() === false ? [
-{
-name: "mcp",
-description: "Manage configured Model Context Protocol (MCP) servers",
-kind: "built-in" /* BUILT_IN */,
-autoExecute: false,
-subCommands: [],
-action: async (_context) => ({
-type: "message",
-messageType: "error",
-content: "MCP is disabled by your admin."
-})
-}
-] : [mcpCommand],
-memoryCommand,
-modelCommand,
-openspecCommand,
-...this.config?.getFolderTrust() ? [permissionsCommand] : [],
-privacyCommand,
-policiesCommand,
-...isDevelopment ? [profileCommand] : [],
-quitCommand,
-restoreCommand(this.config),
-resumeCommand2,
-statsCommand,
-themeCommand,
-toolsCommand,
-...this.config?.isSkillsSupportEnabled() ? this.config?.getSkillManager()?.isAdminEnabled() === false ? [
+let allDefinitions = [];
+try {
+let isNightlyBuild = false;
+try {
+isNightlyBuild = await isNightly(process.cwd());
+} catch (e3) {
+debugLogger.debug("Failed to check for nightly build:", e3);
+}
+allDefinitions = [
+aboutCommand,
+...this.config?.isAgentsEnabled() ? [agentsCommand] : [],
+authCommand,
+bugCommand,
 {
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+...chatCommand,
+subCommands: isNightlyBuild ? [...chatCommand.subCommands || [], debugCommand] : chatCommand.subCommands
+},
+clearCommand,
+compressCommand,
+copyCommand,
+corgiCommand,
+docsCommand,
+directoryCommand,
+editorCommand,
+...this.config?.getExtensionsEnabled() === false ? [
+{
+name: "extensions",
+description: "Manage extensions",
+kind: "built-in" /* BUILT_IN */,
+autoExecute: false,
+subCommands: [],
+action: async (_context) => ({
+type: "message",
+messageType: "error",
+content: "Extensions are disabled by your admin."
+})
+}
+] : [extensionsCommand(this.config?.getEnableExtensionReloading())],
+helpCommand,
+...this.config?.getEnableHooksUI() ? [hooksCommand] : [],
+rewindCommand,
+...await (async () => {
+try {
+const timeoutPromise = new Promise((_2, reject) => {
+setTimeout(() => reject(new Error("ideCommand timed out")), 2e3);
+});
+return [await Promise.race([ideCommand(), timeoutPromise])];
+} catch (e3) {
+debugLogger.debug("Failed to load ide command:", e3);
+return [];
+}
+})(),
+initCommand,
+...this.config?.getMcpEnabled() === false ? [
+{
+name: "mcp",
+description: "Manage configured Model Context Protocol (MCP) servers",
+kind: "built-in" /* BUILT_IN */,
+autoExecute: false,
+subCommands: [],
+action: async (_context) => ({
+type: "message",
+messageType: "error",
+content: "MCP is disabled by your admin."
+})
+}
+] : [mcpCommand],
+memoryCommand,
+modelCommand,
+openspecCommand,
+...this.config?.getFolderTrust() ? [permissionsCommand] : [],
+privacyCommand,
+policiesCommand,
+...isDevelopment ? [profileCommand] : [],
+quitCommand,
+restoreCommand(this.config),
+resumeCommand2,
+statsCommand,
+themeCommand,
+toolsCommand,
+...this.config?.isSkillsSupportEnabled() ? this.config?.getSkillManager()?.isAdminEnabled() === false ? [
+{
+name: "skills",
+description: "Manage agent skills",
+kind: "built-in" /* BUILT_IN */,
+autoExecute: false,
+subCommands: [],
+action: async (_context) => ({
+type: "message",
+messageType: "error",
+content: "Agent skills are disabled by your admin."
+})
+}
+] : [skillsCommand] : [],
+settingsCommand,
+vimCommand,
+setupGithubCommand,
+terminalSetupCommand
+];
+} catch (e3) {
+debugLogger.error("Critical failure in BuiltinCommandLoader:", e3);
+} finally {
+handle2?.end();
+}
 return allDefinitions.filter((cmd) => cmd !== null);
 }
 };
@@ -482588,7 +482960,7 @@ function computeTerminalTitle({
 const MAX_LEN = 80;
 let displayContext = process.env["CLI_TITLE"] || folderName;
 if (!useDynamicTitle) {
-const base = "
+const base = "Codefly CLI ";
 const maxContextLen = MAX_LEN - base.length - 2;
 displayContext = truncate(displayContext, maxContextLen);
 return `${base}(${displayContext})`.padEnd(MAX_LEN, " ");
@@ -484328,11 +484700,15 @@ var useGeminiStream = (geminiClient, history, addItem, config2, settings, onDebu
 const handleContextWindowWillOverflowEvent = (0, import_react131.useCallback)(
 (estimatedRequestTokenCount, remainingTokenCount) => {
 onCancelSubmit(true);
-const limit = tokenLimit(config2.getModel());
-const isLessThan75Percent = limit > 0 && remainingTokenCount < limit * 0.75;
+const limit = tokenLimit(config2.getModel(), config2);
 let text = `Sending this message (${estimatedRequestTokenCount} tokens) might exceed the remaining context window limit (${remainingTokenCount} tokens).`;
-if (
-text
+if (estimatedRequestTokenCount > limit) {
+text = `This message is too large (${estimatedRequestTokenCount} tokens) and exceeds the total context window limit (${limit} tokens). Please try reducing the size of your message by removing large file attachments or @ commands.`;
+} else {
+const isLessThan75Percent = limit > 0 && remainingTokenCount < limit * 0.75;
+if (isLessThan75Percent) {
+text += " Please try reducing the size of your message or use the `/compress` command to compress the chat history.";
+}
 }
 addItem({
 type: "info",
@@ -487972,8 +488348,12 @@ var AppContainer = (props) => {
 apiKey,
 baseUrl: baseUrl || "",
 models: models || "",
-model: updatedModel
+model: updatedModel,
+contextWindowLimit: settings.merged.security.auth.openai?.contextWindowLimit
 };
+if (updatedModel) {
+await config2.setModel(updatedModel);
+}
 }
 if (currentAuthType === AuthType2.USE_GEMINI) {
 await saveApiKey(apiKey);
@@ -495690,7 +496070,7 @@ init_shared2();
 init_legacy_cleanup();
 init_interactive();
 import path153 from "node:path";
-var OPENSPEC_VERSION = "0.24.
+var OPENSPEC_VERSION = "0.24.4";
 var UpdateCommand = class {
 force;
 constructor(options = {}) {
@@ -501651,7 +502031,7 @@ function isGhAuthenticated() {
 }
 }
 function getVersion2() {
-return "0.24.
+return "0.24.4";
 }
 function getPlatform2() {
 return os33.platform();
@@ -503940,7 +504320,7 @@ ${options.description}
 
 // packages/core/dist/src/openspec/cli/index.js
 var program2 = new Command2();
-var version3 = "0.24.
+var version3 = "0.24.4";
 program2.name("openspec").description("AI-native system for spec-driven development").version(version3);
 program2.option("--no-color", "Disable color output");
 program2.hook("preAction", async (thisCommand) => {
@@ -505703,7 +506083,7 @@ var homeDirectoryCheck = {
 if (isFolderTrustEnabled(settings) && isWorkspaceTrusted(settings).isTrusted) {
 return null;
 }
-return "Warning you are running
+return "Warning you are running Codefly CLI in your home directory.\nThis warning can be disabled in /settings";
 }
 return null;
 } catch (_err) {
@@ -505716,7 +506096,7 @@ var rootDirectoryCheck = {
 check: async (workspaceRoot, _settings) => {
 try {
 const workspaceRealPath = await fs143.realpath(workspaceRoot);
-const errorMessage = "Warning: You are running
+const errorMessage = "Warning: You are running Codefly CLI in the root directory. Your entire folder structure will be used for context. It is strongly recommended to run in a project-specific directory.";
 if (path186.dirname(workspaceRealPath) === workspaceRealPath) {
 return errorMessage;
 }