@bonginkan/maria 4.3.26 → 4.3.27

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/cli.cjs CHANGED
@@ -1704,7 +1704,7 @@ var init_AuthenticationManager = __esm({
1704
1704
  const response = await fetch(`${this.apiBase}/api/user/profile`, {
1705
1705
  headers: {
1706
1706
  "Authorization": `Bearer ${tokens2.accessToken}`,
1707
- "User-Agent": `maria-cli/${process.env.CLI_VERSION || "4.3.26"}`
1707
+ "User-Agent": `maria-cli/${process.env.CLI_VERSION || "4.3.27"}`
1708
1708
  }
1709
1709
  });
1710
1710
  if (response.status === 401) {
@@ -2407,7 +2407,7 @@ async function callApi(path60, init3 = {}) {
2407
2407
  "Authorization": `Bearer ${token}`,
2408
2408
  "X-Device-Id": getDeviceId(),
2409
2409
  "X-Session-Id": getSessionId() || "",
2410
- "User-Agent": `maria-cli/${process.env.CLI_VERSION || "4.3.26"}`,
2410
+ "User-Agent": `maria-cli/${process.env.CLI_VERSION || "4.3.27"}`,
2411
2411
  "Content-Type": init3.headers?.["Content-Type"] || "application/json"
2412
2412
  });
2413
2413
  const doFetch = async (token) => {
@@ -16121,8 +16121,8 @@ var require_package = __commonJS({
16121
16121
  "package.json"(exports, module) {
16122
16122
  module.exports = {
16123
16123
  name: "@bonginkan/maria",
16124
- version: "4.3.26",
16125
- description: "\u{1F680} MARIA v4.3.26 - Enterprise AI Development Platform with identity system and character voice implementation. Features 74 production-ready commands with comprehensive fallback implementation, local LLM support, and zero external dependencies. Includes natural language coding, AI safety evaluation, intelligent evolution system, episodic memory with PII masking, and real-time monitoring dashboard. Built with TypeScript AST-powered code generation, OAuth2.0 + PKCE authentication, quantum-resistant cryptography, and enterprise-grade performance.",
16124
+ version: "4.3.27",
16125
+ description: "\u{1F680} MARIA v4.3.27 - Enterprise AI Development Platform with identity system and character voice implementation. Features 74 production-ready commands with comprehensive fallback implementation, local LLM support, and zero external dependencies. Includes natural language coding, AI safety evaluation, intelligent evolution system, episodic memory with PII masking, and real-time monitoring dashboard. Built with TypeScript AST-powered code generation, OAuth2.0 + PKCE authentication, quantum-resistant cryptography, and enterprise-grade performance.",
16126
16126
  keywords: [
16127
16127
  "ai",
16128
16128
  "cli",
@@ -25753,7 +25753,7 @@ var init_about_command = __esm({
25753
25753
  async execute(args2, context2) {
25754
25754
  const output3 = [];
25755
25755
  output3.push("");
25756
- output3.push(chalk40__default.default.cyan.bold("\u{1F916} About MARIA v4.3.26"));
25756
+ output3.push(chalk40__default.default.cyan.bold("\u{1F916} About MARIA v4.3.27"));
25757
25757
  output3.push(chalk40__default.default.gray("\u2550".repeat(40)));
25758
25758
  output3.push("");
25759
25759
  output3.push(chalk40__default.default.white.bold("MARIA - Minimal API, Maximum Power"));
@@ -39001,6 +39001,7 @@ async function orchestrate(request, opts) {
39001
39001
  "At the very start of each file, include a filename hint as a comment on the first line:",
39002
39002
  "// filename: <name.ext> (or # filename: <name.ext> for Python, <!-- filename: name.ext --> for HTML/CSS)",
39003
39003
  "If multiple files are required, use the strict multi-file protocol with one or more file sections:",
39004
+ "Make sure you place every file in structured folders, with the top-level folder named '{project_name: proper project name per request}'.",
39004
39005
  "[BEGIN file: path]\n<content>\n[END]",
39005
39006
  "Do not include any prose before/after; no menus/questions/suggestions; start immediately with ``` or [BEGIN file: ...]."
39006
39007
  ].join("\n");
@@ -59739,7 +59740,7 @@ var init_ai_response_service = __esm({
59739
59740
  */
59740
59741
  async callLLM(prompt, opts = {}) {
59741
59742
  const {
59742
- system = PLAIN_OUTPUT ? "Return ONLY the answer (or ONLY code). No menus, no lists, no guided flows. Always respond in English." : "You are a helpful senior engineer. Always respond in English. Provide direct, production-quality answers. Make sure you answer in plain text, as a natural chat.",
59743
+ system = PLAIN_OUTPUT ? "Return ONLY the answer (or ONLY code). No menus, no lists, no guided flows. Always respond in English." : "You are a helpful senior engineer named Maria. Always respond in English. Provide direct, production-quality answers. Make sure you answer in plain text, as a natural chat.",
59743
59744
  model = void 0,
59744
59745
  provider = DEFAULT_PROVIDER2,
59745
59746
  temperature = 0.2,