@bonginkan/maria 4.2.29 → 4.2.31
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +5 -5
- package/dist/READY.manifest.json +1 -1
- package/dist/bin/maria.cjs +52 -28
- package/dist/bin/maria.cjs.map +1 -1
- package/dist/cli.cjs +52 -28
- package/dist/cli.cjs.map +1 -1
- package/dist/index.js +2 -2
- package/dist/index.js.map +1 -1
- package/dist/server/express-server.cjs +2 -2
- package/dist/server/express-server.cjs.map +1 -1
- package/package.json +2 -2
- package/src/slash-commands/READY.manifest.json +1 -1
package/README.md
CHANGED
@@ -1,4 +1,4 @@
-# MARIA - AI Development Platform v4.2.
+# MARIA - AI Development Platform v4.2.31
 
 [](https://www.npmjs.com/package/@bonginkan/maria)
 [](LICENSE)
@@ -10,7 +10,7 @@
 
 > **Enterprise-grade AI development platform with 100% command availability and comprehensive fallback support**
 
-## 🚀 What's New in v4.2.
+## 🚀 What's New in v4.2.31 (September 3, 2025)
 
 ### 🏗️ Build Status - All Systems Operational ✅
 - **CLI NPM Package**: ESM + CJS builds successful (2.02MB/1.16MB)
@@ -748,10 +748,10 @@ await secureWorkflow.executeWithAuth(workflowDefinition, securityContext);
 ### Quick Installation
 ```bash
 # Install globally (recommended)
-npm install -g @bonginkan/maria@4.2.
+npm install -g @bonginkan/maria@4.2.31
 
 # Verify installation
-maria --version # Should show v4.2.
+maria --version # Should show v4.2.31
 
 # Initialize with authentication
 maria /login # Setup OAuth2.0 + PKCE authentication
@@ -952,7 +952,7 @@ MARIA CODE is distributed under a comprehensive licensing system designed for in
 
 *MARIA v4.1.4 represents the pinnacle of multimodal AI development platform evolution - combining revolutionary voice-to-code capabilities, advanced memory systems, and comprehensive command ecosystems with enterprise-grade security and performance. This release establishes MARIA as the definitive choice for developers and Fortune 500 enterprises seeking intelligent, multimodal development experiences with GraphRAG intelligence, multilingual support, and zero-anxiety coding workflows.*
 
-**Transform your development experience today**: `npm install -g @bonginkan/maria@4.2.
+**Transform your development experience today**: `npm install -g @bonginkan/maria@4.2.31`
 
 🌐 **Official Website**: [https://maria-code.ai](https://maria-code.ai)
 💬 **Community**: [https://discord.gg/SMSmSGcEQy](https://discord.gg/SMSmSGcEQy)
package/dist/READY.manifest.json
CHANGED
package/dist/bin/maria.cjs
CHANGED
@@ -500,7 +500,7 @@ var DEFAULT_PROVIDER2, DEFAULT_MODEL2;
 var init_config = __esm({
   "src/providers/config.ts"() {
     DEFAULT_PROVIDER2 = process.env.DEFAULT_PROVIDER || "openai";
-    DEFAULT_MODEL2 = process.env.
+    DEFAULT_MODEL2 = process.env.OPENAI_MODEL || process.env.MARIA_DEFAULT_MODEL || "gpt-5-mini";
   }
 });
 
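The replacement line makes the default model configurable through environment variables. A minimal sketch of the same resolution order (the helper name is hypothetical; only the variable names and the fallback value come from the diff):

```typescript
// Illustrative only: resolves the default model the way the hunk above does.
function resolveDefaultModel(env: NodeJS.ProcessEnv = process.env): string {
  // OPENAI_MODEL wins, then MARIA_DEFAULT_MODEL, then the hard-coded fallback.
  return env.OPENAI_MODEL || env.MARIA_DEFAULT_MODEL || "gpt-5-mini";
}

console.log(resolveDefaultModel({ OPENAI_MODEL: "gpt-4o" })); // -> "gpt-4o"
console.log(resolveDefaultModel({}));                         // -> "gpt-5-mini"
```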
@@ -7696,13 +7696,16 @@ var init_manager = __esm({
       if (h2.ok) return p;
     }
     const order = [
+      // Prefer cloud OpenAI when configured
       "openai",
+      // Prioritize local providers next for offline/dev environments
+      "lmstudio",
+      "ollama",
+      "vllm",
+      // Other clouds after local options
       "anthropic",
       "google",
       "grok",
-      "ollama",
-      "lmstudio",
-      "vllm",
       "groq"
     ];
     for (const id of order) {
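The reordering changes fallback priority: OpenAI first, then the local providers (LM Studio, Ollama, vLLM), then the remaining cloud providers. A simplified sketch of how such an ordered list is typically consumed; the `isHealthy` callback and the return contract are assumptions, not part of the bundle:

```typescript
type ProviderId =
  | "openai" | "lmstudio" | "ollama" | "vllm"
  | "anthropic" | "google" | "grok" | "groq";

// New priority order copied from the diff; comments paraphrase the inline ones.
const order: ProviderId[] = [
  "openai",                               // cloud OpenAI first when configured
  "lmstudio", "ollama", "vllm",           // local providers next for offline/dev setups
  "anthropic", "google", "grok", "groq",  // remaining clouds last
];

// Hypothetical health-check-driven selection: the first healthy provider wins.
async function pickProvider(
  isHealthy: (id: ProviderId) => Promise<boolean>
): Promise<ProviderId | undefined> {
  for (const id of order) {
    if (await isHealthy(id)) return id;
  }
  return undefined; // nothing reachable
}
```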
@@ -7750,8 +7753,8 @@ var init_manager = __esm({
     /** Legacy sync method kept for backward compatibility (minimal) */
     getAvailableModels() {
       const out = [];
-      if (this.available.has("openai")) out.push("gpt-
-      if (this.available.has("anthropic")) out.push("claude-
+      if (this.available.has("openai")) out.push("gpt-5", "gpt-5-mini", "gpt-4o", "gpt-4o-mini");
+      if (this.available.has("anthropic")) out.push("claude-opus-4-1-20250805", "claude-opus-4-20250514", "claude-sonnet-4-20250514");
       if (this.available.has("google")) out.push("gemini-2.5-pro", "gemini-2.5-flash");
       if (this.available.has("grok")) out.push("grok-4", "grok-beta");
       return out;
@@ -7877,12 +7880,15 @@ var init_manager = __esm({
       if (!apiKey) throw new Error("OpenAI API key not configured");
       const controller = new AbortController();
       const timeout = setTimeout(() => controller.abort(), 3e5);
-      const modelName = req.model || "gpt-5-mini
-      const isGPT5 = modelName.
+      const modelName = req.model || process.env.OPENAI_MODEL || "gpt-5-mini";
+      const isGPT5 = /\bgpt-5\b/i.test(modelName) || modelName.toLowerCase().startsWith("gpt-5");
       const bodyParams = {
         model: modelName,
         messages: [
-          {
+          {
+            role: "system",
+            content: "You are a helpful assistant. Always respond in English. Provide direct, clear answers without menus or numbered options."
+          },
           { role: "user", content: prompt }
         ]
       };
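The rewritten request path resolves the model from the request, then `OPENAI_MODEL`, then `gpt-5-mini`, detects GPT-5-family models, and prepends an English-only system message. A trimmed TypeScript sketch of that assembly (the fetch call and whatever parameters `isGPT5` gates elsewhere are omitted; the builder name is hypothetical):

```typescript
interface ChatMessage { role: "system" | "user"; content: string }

function buildOpenAIBody(prompt: string, requestedModel?: string) {
  const modelName = requestedModel || process.env.OPENAI_MODEL || "gpt-5-mini";
  // Same detection as the diff: word-boundary match or gpt-5 prefix.
  const isGPT5 = /\bgpt-5\b/i.test(modelName) || modelName.toLowerCase().startsWith("gpt-5");
  const messages: ChatMessage[] = [
    {
      role: "system",
      content: "You are a helpful assistant. Always respond in English. Provide direct, clear answers without menus or numbered options.",
    },
    { role: "user", content: prompt },
  ];
  return { body: { model: modelName, messages }, isGPT5 };
}
```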
@@ -7907,7 +7913,7 @@ var init_manager = __esm({
         throw new Error(`OpenAI ${res.status}: ${txt}`);
       }
       const json = await res.json();
-      return { content: json.choices?.[0]?.message?.content ?? "" };
+      return { content: json.choices?.[0]?.message?.content ?? "", model: json.model };
     } catch (error2) {
       clearTimeout(timeout);
       if (error2.name === "AbortError" || error2.message?.includes("abort")) {
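The success path now also surfaces the model id the API reports back, so callers can tell which model actually served the request. A small sketch of the shape (interface names are hypothetical):

```typescript
interface OpenAIChatResponse {
  choices?: { message?: { content?: string } }[];
  model?: string; // model id echoed back by the API
}

interface CompletionResult {
  content: string;
  model?: string;
}

function toCompletionResult(json: OpenAIChatResponse): CompletionResult {
  return { content: json.choices?.[0]?.message?.content ?? "", model: json.model };
}
```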
@@ -24329,8 +24335,8 @@ var require_package = __commonJS({
   "package.json"(exports, module) {
     module.exports = {
       name: "@bonginkan/maria",
-      version: "4.2.
-      description: "\u{1F680} MARIA v4.2.
+      version: "4.2.31",
+      description: "\u{1F680} MARIA v4.2.31 - Enterprise AI Development Platform with 100% Command Availability. Features 74 production-ready commands with comprehensive fallback implementation, local LLM support, and zero external dependencies. Includes natural language coding, AI safety evaluation, intelligent evolution system, episodic memory with PII masking, and real-time monitoring dashboard. Built with TypeScript AST-powered code generation, OAuth2.0 + PKCE authentication, quantum-resistant cryptography, and enterprise-grade performance.",
       keywords: [
         "ai",
         "cli",
@@ -26536,7 +26542,7 @@ var init_AuthenticationManager = __esm({
       const response2 = await fetch(`${this.apiBase}/api/user/profile`, {
         headers: {
           "Authorization": `Bearer ${tokens.accessToken}`,
-          "User-Agent": `maria-cli/${process.env.CLI_VERSION || "4.2.
+          "User-Agent": `maria-cli/${process.env.CLI_VERSION || "4.2.31"}`
         }
       });
       if (response2.status === 401) {
@@ -29238,6 +29244,17 @@ var init_ai_response_service = __esm({
       try {
         await this.providerManager.initialize();
         this.initialized = true;
+        try {
+          const noOpenAI = !process.env.OPENAI_API_KEY;
+          const isMac = process.platform === "darwin";
+          if (isMac && noOpenAI) {
+            const available = new Set(this.providerManager.getAvailableProviders());
+            if (available.has("lmstudio")) {
+              this.providerManager.setActiveProvider("lmstudio");
+            }
+          }
+        } catch {
+        }
       } catch {
         this.initialized = false;
       }
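The inserted block prefers LM Studio on macOS when no OpenAI key is configured, and deliberately swallows any error so initialization never fails because of it. A standalone sketch under those assumptions; the manager interface is inferred from the calls visible in the diff:

```typescript
interface ProviderManagerLike {
  getAvailableProviders(): string[];
  setActiveProvider(id: string): void;
}

function preferLmStudioOnMac(
  manager: ProviderManagerLike,
  env: NodeJS.ProcessEnv = process.env,
  platform: NodeJS.Platform = process.platform
): void {
  try {
    const noOpenAI = !env.OPENAI_API_KEY;
    if (platform === "darwin" && noOpenAI) {
      const available = new Set(manager.getAvailableProviders());
      if (available.has("lmstudio")) manager.setActiveProvider("lmstudio");
    }
  } catch {
    // Best effort: never let this preference break initialization.
  }
}
```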
@@ -29256,13 +29273,19 @@ var init_ai_response_service = __esm({
       */
       async callLLM(prompt, opts = {}) {
         const {
-          system = PLAIN_OUTPUT ? "Return ONLY the answer (or ONLY code). No menus, no lists, no guided flows." : "You are a helpful senior engineer. Provide direct, production-quality answers.",
+          system = PLAIN_OUTPUT ? "Return ONLY the answer (or ONLY code). No menus, no lists, no guided flows. Always respond in English." : "You are a helpful senior engineer. Always respond in English. Provide direct, production-quality answers.",
           model = DEFAULT_MODEL2,
           provider = DEFAULT_PROVIDER2,
           temperature = 0.2,
           maxTokens = 32e3
         } = opts;
         try {
+          if (provider) {
+            try {
+              this.providerManager.setCurrentProvider(provider);
+            } catch {
+            }
+          }
           const res = await this.providerManager.complete({
             prompt: `${system}
 
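`callLLM` now pins the requested provider (if any) before completing, ignoring failures so the previously active provider keeps serving. A reduced sketch with hypothetical types; the defaults mirror the diff, and the `${system}\n\n${prompt}` concatenation is assumed from the visible start of the template literal:

```typescript
interface ProviderManagerLite {
  setCurrentProvider(id: string): void;
  complete(req: { prompt: string; temperature: number; maxTokens: number }): Promise<{ content: string }>;
}

async function callLLM(
  manager: ProviderManagerLite,
  prompt: string,
  opts: { system?: string; provider?: string; temperature?: number; maxTokens?: number } = {}
): Promise<{ content: string }> {
  const {
    system = "You are a helpful senior engineer. Always respond in English. Provide direct, production-quality answers.",
    provider,
    temperature = 0.2,
    maxTokens = 32000,
  } = opts;
  if (provider) {
    try {
      manager.setCurrentProvider(provider); // best effort; ignore failures
    } catch {
      // keep whichever provider is already active
    }
  }
  return manager.complete({ prompt: `${system}\n\n${prompt}`, temperature, maxTokens });
}
```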
@@ -47703,7 +47726,7 @@ var init_about_command = __esm({
       async execute(args2, context2) {
         const output3 = [];
         output3.push("");
-        output3.push(chalk27__default.default.cyan.bold("\u{1F916} About MARIA v4.2.
+        output3.push(chalk27__default.default.cyan.bold("\u{1F916} About MARIA v4.2.31"));
         output3.push(chalk27__default.default.gray("\u2550".repeat(40)));
         output3.push("");
         output3.push(chalk27__default.default.white.bold("MARIA - Minimal API, Maximum Power"));
@@ -58887,17 +58910,17 @@ async function handleCodeCommand(prompt) {
     const response2 = await generateStrictCode(prompt);
     spinner.stop();
     if (response2) {
-      console.log(response2);
+      console.log(chalk27__default.default.green(response2));
       const { language, code, extension } = extractCodeInfo(response2);
       const filename = generateCodeFilename(prompt, language, extension);
       const filepath = path9__namespace.resolve(process.cwd(), filename);
       await fsp__namespace.writeFile(filepath, code, "utf-8");
       console.log(
-        chalk27__default.default.green("\n\u2705
-`) + chalk27__default.default.
-`) + chalk27__default.default.
-`) + chalk27__default.default.
-`) + chalk27__default.default.
+        chalk27__default.default.green("\n\u2705 Code Saved\n") + chalk27__default.default.green(`\u{1F4C1} File (Click to open):
+`) + chalk27__default.default.green(`\u2022 [${filename}](file://${filepath})
+`) + chalk27__default.default.green(` \u{1F4CD} Path: \`${filepath}\`
+`) + chalk27__default.default.green(` \u{1F4DD} Language: ${language}
+`) + chalk27__default.default.green(`
 \u{1F4A1} Tip: Command+Click (Mac) or Ctrl+Click (Windows/Linux) to open file`)
       );
     } else {
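Both the success path above and the template fallback below now echo the generated code in green and print a clickable `file://` link for the saved file. A self-contained sketch of that output step using chalk and the standard `fs`/`path` modules; the function and its arguments are stand-ins for the `extractCodeInfo`/`generateCodeFilename` results shown in the diff:

```typescript
import * as path from "node:path";
import { promises as fsp } from "node:fs";
import chalk from "chalk";

// Saves generated code and prints the green summary with a clickable file:// link.
async function saveAndAnnounce(code: string, filename: string, language: string): Promise<void> {
  const filepath = path.resolve(process.cwd(), filename);
  await fsp.writeFile(filepath, code, "utf-8");
  console.log(chalk.green(code));
  console.log(
    chalk.green("\n✅ Code Saved\n") +
      chalk.green("📁 File (Click to open):\n") +
      chalk.green(`• [${filename}](file://${filepath})\n`) +
      chalk.green(`  📍 Path: \`${filepath}\`\n`) +
      chalk.green(`  📝 Language: ${language}\n`) +
      chalk.green("\n💡 Tip: Command+Click (Mac) or Ctrl+Click (Windows/Linux) to open file")
  );
}
```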
@@ -58912,17 +58935,18 @@ async function handleCodeCommand(prompt) {
       console.log(
         chalk27__default.default.yellow("\u26A0 AI unavailable, using template fallback:\n")
       );
-      console.log(fallbackCode);
+      console.log(chalk27__default.default.green(fallbackCode));
       try {
         const { language, code, extension } = extractCodeInfo(fallbackCode);
         const filename = generateCodeFilename(prompt, language, extension);
         const filepath = path9__namespace.resolve(process.cwd(), filename);
         await fsp__namespace.writeFile(filepath, code, "utf-8");
         console.log(
-          chalk27__default.default.green("\n\u2705
-`) + chalk27__default.default.
-`) + chalk27__default.default.
-`) + chalk27__default.default.
+          chalk27__default.default.green("\n\u2705 Template Code Saved\n") + chalk27__default.default.green(`\u{1F4C1} File (Click to open):
+`) + chalk27__default.default.green(`\u2022 [${filename}](file://${filepath})
+`) + chalk27__default.default.green(` \u{1F4CD} Path: \`${filepath}\`
+`) + chalk27__default.default.green(` \u{1F4DD} Language: ${language}
+`) + chalk27__default.default.green(`
 \u{1F4A1} Tip: Command+Click (Mac) or Ctrl+Click (Windows/Linux) to open file`)
         );
       } catch (saveError) {
@@ -59141,9 +59165,9 @@ async function streamAnswer(text) {
     });
     animation.stop();
     if (ai.streamResponse) {
-      await ai.streamResponse(resp, (line) => console.log(line));
+      await ai.streamResponse(resp, (line) => console.log(chalk27__default.default.green(line)));
     } else {
-      console.log(resp);
+      console.log(chalk27__default.default.green(resp));
     }
     const msg = { role: "assistant", content: resp, timestamp: /* @__PURE__ */ new Date() };
     session.push(msg);
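Finally, `streamAnswer` now routes both streamed lines and non-streamed responses through `chalk.green`. A minimal sketch of that printing step, with the `ai` interface reduced to the one optional method used here (an assumption, not the package's real type):

```typescript
import chalk from "chalk";

interface AiLike {
  streamResponse?(text: string, onLine: (line: string) => void): Promise<void>;
}

// Prints the answer in green, streaming line by line when the provider supports it.
async function printAnswer(ai: AiLike, resp: string): Promise<void> {
  if (ai.streamResponse) {
    await ai.streamResponse(resp, (line) => console.log(chalk.green(line)));
  } else {
    console.log(chalk.green(resp));
  }
}
```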