@bonginkan/maria 4.2.29 → 4.2.30
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +5 -5
- package/dist/READY.manifest.json +1 -1
- package/dist/bin/maria.cjs +29 -25
- package/dist/bin/maria.cjs.map +1 -1
- package/dist/cli.cjs +29 -25
- package/dist/cli.cjs.map +1 -1
- package/dist/index.js +2 -2
- package/dist/index.js.map +1 -1
- package/dist/server/express-server.cjs +2 -2
- package/dist/server/express-server.cjs.map +1 -1
- package/package.json +2 -2
- package/src/slash-commands/READY.manifest.json +1 -1
package/dist/cli.cjs
CHANGED
@@ -1893,7 +1893,7 @@ var init_AuthenticationManager = __esm({
 const response2 = await fetch(`${this.apiBase}/api/user/profile`, {
 headers: {
 "Authorization": `Bearer ${tokens.accessToken}`,
-"User-Agent": `maria-cli/${process.env.CLI_VERSION || "4.2.
+"User-Agent": `maria-cli/${process.env.CLI_VERSION || "4.2.30"}`
 }
 });
 if (response2.status === 401) {

@@ -3814,7 +3814,7 @@ var DEFAULT_PROVIDER2, DEFAULT_MODEL2;
 var init_config = __esm({
 "src/providers/config.ts"() {
 DEFAULT_PROVIDER2 = process.env.DEFAULT_PROVIDER || "openai";
-DEFAULT_MODEL2 = process.env.
+DEFAULT_MODEL2 = process.env.OPENAI_MODEL || process.env.MARIA_DEFAULT_MODEL || "gpt-5-mini";
 }
 });

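Note: the new default-model line resolves in a fixed order: OPENAI_MODEL first, then MARIA_DEFAULT_MODEL, then the literal "gpt-5-mini". A minimal sketch of that fallback chain outside the bundle (resolveDefaultModel is an illustrative helper, not an export of the package):

    // Illustrative sketch only; mirrors the fallback introduced in src/providers/config.ts.
    function resolveDefaultModel(env = process.env) {
      return env.OPENAI_MODEL || env.MARIA_DEFAULT_MODEL || "gpt-5-mini";
    }

    console.log(resolveDefaultModel({}));                                // "gpt-5-mini"
    console.log(resolveDefaultModel({ MARIA_DEFAULT_MODEL: "gpt-4o" })); // "gpt-4o"
    console.log(resolveDefaultModel({ OPENAI_MODEL: "gpt-5" }));         // "gpt-5" (takes precedence)
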
@@ -11064,8 +11064,8 @@ var init_manager = __esm({
 /** Legacy sync method kept for backward compatibility (minimal) */
 getAvailableModels() {
 const out = [];
-if (this.available.has("openai")) out.push("gpt-
-if (this.available.has("anthropic")) out.push("claude-
+if (this.available.has("openai")) out.push("gpt-5", "gpt-5-mini", "gpt-4o", "gpt-4o-mini");
+if (this.available.has("anthropic")) out.push("claude-opus-4-1-20250805", "claude-opus-4-20250514", "claude-sonnet-4-20250514");
 if (this.available.has("google")) out.push("gemini-2.5-pro", "gemini-2.5-flash");
 if (this.available.has("grok")) out.push("grok-4", "grok-beta");
 return out;

@@ -11191,12 +11191,15 @@ var init_manager = __esm({
 if (!apiKey) throw new Error("OpenAI API key not configured");
 const controller = new AbortController();
 const timeout = setTimeout(() => controller.abort(), 3e5);
-const modelName = req.model || "gpt-5-mini
-const isGPT5 = modelName.
+const modelName = req.model || process.env.OPENAI_MODEL || "gpt-5-mini";
+const isGPT5 = /\bgpt-5\b/i.test(modelName) || modelName.toLowerCase().startsWith("gpt-5");
 const bodyParams = {
 model: modelName,
 messages: [
-{
+{
+role: "system",
+content: "You are a helpful assistant. Always respond in English. Provide direct, clear answers without menus or numbered options."
+},
 { role: "user", content: prompt }
 ]
 };

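This hunk makes two behavioural changes: the request model can now also be set via OPENAI_MODEL, and every request gains a fixed English-only system message. The GPT-5 check now combines a word-boundary regex with a case-insensitive prefix test, so names that embed "gpt-5" are recognised. A small standalone sketch of that detection, with illustrative model names:

    // Same expression as the new line 11195; the model names below are illustrative.
    const isGPT5 = (modelName) =>
      /\bgpt-5\b/i.test(modelName) || modelName.toLowerCase().startsWith("gpt-5");

    console.log(isGPT5("gpt-5-mini"));   // true (prefix and word-boundary match)
    console.log(isGPT5("openai/gpt-5")); // true (word-boundary match only)
    console.log(isGPT5("gpt-4o"));       // false
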
@@ -11221,7 +11224,7 @@ var init_manager = __esm({
 throw new Error(`OpenAI ${res.status}: ${txt}`);
 }
 const json = await res.json();
-return { content: json.choices?.[0]?.message?.content ?? "" };
+return { content: json.choices?.[0]?.message?.content ?? "", model: json.model };
 } catch (error2) {
 clearTimeout(timeout);
 if (error2.name === "AbortError" || error2.message?.includes("abort")) {

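The OpenAI call now returns the model reported in the API response alongside the content, so callers can see which model actually served the request. A hedged sketch of the new return shape, using a hand-written sample payload rather than a live API call:

    // Sample object shaped like an OpenAI chat completion; the model string is made up.
    const json = {
      model: "gpt-5-mini-2025-08-07",
      choices: [{ message: { content: "Hello!" } }]
    };
    // Same mapping as the new line 11227.
    const result = { content: json.choices?.[0]?.message?.content ?? "", model: json.model };
    console.log(result); // { content: "Hello!", model: "gpt-5-mini-2025-08-07" }
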
@@ -12443,7 +12446,7 @@ var init_ai_response_service = __esm({
 */
 async callLLM(prompt, opts = {}) {
 const {
-system = PLAIN_OUTPUT ? "Return ONLY the answer (or ONLY code). No menus, no lists, no guided flows." : "You are a helpful senior engineer. Provide direct, production-quality answers.",
+system = PLAIN_OUTPUT ? "Return ONLY the answer (or ONLY code). No menus, no lists, no guided flows. Always respond in English." : "You are a helpful senior engineer. Always respond in English. Provide direct, production-quality answers.",
 model = DEFAULT_MODEL2,
 provider = DEFAULT_PROVIDER2,
 temperature = 0.2,

@@ -19127,8 +19130,8 @@ var require_package = __commonJS({
 "package.json"(exports, module) {
 module.exports = {
 name: "@bonginkan/maria",
-version: "4.2.
-description: "\u{1F680} MARIA v4.2.
+version: "4.2.30",
+description: "\u{1F680} MARIA v4.2.30 - Enterprise AI Development Platform with 100% Command Availability. Features 74 production-ready commands with comprehensive fallback implementation, local LLM support, and zero external dependencies. Includes natural language coding, AI safety evaluation, intelligent evolution system, episodic memory with PII masking, and real-time monitoring dashboard. Built with TypeScript AST-powered code generation, OAuth2.0 + PKCE authentication, quantum-resistant cryptography, and enterprise-grade performance.",
 keywords: [
 "ai",
 "cli",

@@ -31589,7 +31592,7 @@ var init_about_command = __esm({
 async execute(args2, context2) {
 const output3 = [];
 output3.push("");
-output3.push(chalk28__default.default.cyan.bold("\u{1F916} About MARIA v4.2.
+output3.push(chalk28__default.default.cyan.bold("\u{1F916} About MARIA v4.2.30"));
 output3.push(chalk28__default.default.gray("\u2550".repeat(40)));
 output3.push("");
 output3.push(chalk28__default.default.white.bold("MARIA - Minimal API, Maximum Power"));

@@ -58937,17 +58940,17 @@ async function handleCodeCommand(prompt) {
 const response2 = await generateStrictCode(prompt);
 spinner.stop();
 if (response2) {
-console.log(response2);
+console.log(chalk28__default.default.green(response2));
 const { language, code, extension } = extractCodeInfo(response2);
 const filename = generateCodeFilename(prompt, language, extension);
 const filepath = path8__namespace.resolve(process.cwd(), filename);
 await fsp__namespace.writeFile(filepath, code, "utf-8");
 console.log(
-chalk28__default.default.green("\n\u2705
-`) + chalk28__default.default.
-`) + chalk28__default.default.
-`) + chalk28__default.default.
-`) + chalk28__default.default.
+chalk28__default.default.green("\n\u2705 Code Saved\n") + chalk28__default.default.green(`\u{1F4C1} File (Click to open):
+`) + chalk28__default.default.green(`\u2022 [${filename}](file://${filepath})
+`) + chalk28__default.default.green(` \u{1F4CD} Path: \`${filepath}\`
+`) + chalk28__default.default.green(` \u{1F4DD} Language: ${language}
+`) + chalk28__default.default.green(`
 \u{1F4A1} Tip: Command+Click (Mac) or Ctrl+Click (Windows/Linux) to open file`)
 );
 } else {

@@ -58962,17 +58965,18 @@ async function handleCodeCommand(prompt) {
 console.log(
 chalk28__default.default.yellow("\u26A0 AI unavailable, using template fallback:\n")
 );
-console.log(fallbackCode);
+console.log(chalk28__default.default.green(fallbackCode));
 try {
 const { language, code, extension } = extractCodeInfo(fallbackCode);
 const filename = generateCodeFilename(prompt, language, extension);
 const filepath = path8__namespace.resolve(process.cwd(), filename);
 await fsp__namespace.writeFile(filepath, code, "utf-8");
 console.log(
-chalk28__default.default.green("\n\u2705
-`) + chalk28__default.default.
-`) + chalk28__default.default.
-`) + chalk28__default.default.
+chalk28__default.default.green("\n\u2705 Template Code Saved\n") + chalk28__default.default.green(`\u{1F4C1} File (Click to open):
+`) + chalk28__default.default.green(`\u2022 [${filename}](file://${filepath})
+`) + chalk28__default.default.green(` \u{1F4CD} Path: \`${filepath}\`
+`) + chalk28__default.default.green(` \u{1F4DD} Language: ${language}
+`) + chalk28__default.default.green(`
 \u{1F4A1} Tip: Command+Click (Mac) or Ctrl+Click (Windows/Linux) to open file`)
 );
 } catch (saveError) {

@@ -59191,9 +59195,9 @@ async function streamAnswer(text) {
 });
 animation.stop();
 if (ai.streamResponse) {
-await ai.streamResponse(resp, (line) => console.log(line));
+await ai.streamResponse(resp, (line) => console.log(chalk28__default.default.green(line)));
 } else {
-console.log(resp);
+console.log(chalk28__default.default.green(resp));
 }
 const msg = { role: "assistant", content: resp, timestamp: /* @__PURE__ */ new Date() };
 session.push(msg);