@bonginkan/maria 4.3.32 → 4.3.33

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/cli.cjs CHANGED
@@ -1704,7 +1704,7 @@ var init_AuthenticationManager = __esm({
  const response = await fetch(`${this.apiBase}/api/user/profile`, {
  headers: {
  "Authorization": `Bearer ${tokens2.accessToken}`,
- "User-Agent": `maria-cli/${process.env.CLI_VERSION || "4.3.32"}`
+ "User-Agent": `maria-cli/${process.env.CLI_VERSION || "4.3.33"}`
  }
  });
  if (response.status === 401) {
@@ -2407,7 +2407,7 @@ async function callApi(path64, init3 = {}) {
  "Authorization": `Bearer ${token}`,
  "X-Device-Id": getDeviceId(),
  "X-Session-Id": getSessionId() || "",
- "User-Agent": `maria-cli/${process.env.CLI_VERSION || "4.3.32"}`,
+ "User-Agent": `maria-cli/${process.env.CLI_VERSION || "4.3.33"}`,
  "Content-Type": init3.headers?.["Content-Type"] || "application/json"
  });
  const doFetch = async (token) => {
@@ -16121,8 +16121,8 @@ var require_package = __commonJS({
  "package.json"(exports, module) {
  module.exports = {
  name: "@bonginkan/maria",
- version: "4.3.32",
- description: "\u{1F680} MARIA v4.3.32 - Enterprise AI Development Platform with identity system and character voice implementation. Features 74 production-ready commands with comprehensive fallback implementation, local LLM support, and zero external dependencies. Includes natural language coding, AI safety evaluation, intelligent evolution system, episodic memory with PII masking, and real-time monitoring dashboard. Built with TypeScript AST-powered code generation, OAuth2.0 + PKCE authentication, quantum-resistant cryptography, and enterprise-grade performance.",
+ version: "4.3.33",
+ description: "\u{1F680} MARIA v4.3.33 - Enterprise AI Development Platform with identity system and character voice implementation. Features 74 production-ready commands with comprehensive fallback implementation, local LLM support, and zero external dependencies. Includes natural language coding, AI safety evaluation, intelligent evolution system, episodic memory with PII masking, and real-time monitoring dashboard. Built with TypeScript AST-powered code generation, OAuth2.0 + PKCE authentication, quantum-resistant cryptography, and enterprise-grade performance.",
  keywords: [
  "ai",
  "cli",
@@ -25817,7 +25817,7 @@ var init_about_command = __esm({
  async execute(args2, context2) {
  const output3 = [];
  output3.push("");
- output3.push(chalk40__default.default.cyan.bold("\u{1F916} About MARIA v4.3.32"));
+ output3.push(chalk40__default.default.cyan.bold("\u{1F916} About MARIA v4.3.33"));
  output3.push(chalk40__default.default.gray("\u2550".repeat(40)));
  output3.push("");
  output3.push(chalk40__default.default.white.bold("MARIA - Minimal API, Maximum Power"));
@@ -57678,7 +57678,7 @@ ${files.slice(0, 20).map((f3) => `- ${f3.path}`).join("\n")}`);
  const bundle = parts.join("\n\n");
  const system = [
  "You are an impartial evaluator. Score each criterion between 0 and 1.",
- "If a local file path is provided, make sure you read the uploaded file before taking any actions.",
+ "If a local file path is provided, make sure you read the uploaded file before taking any actions. Write the string content in plain text and do not use markdown.",
  "Return JSON only in the following schema:",
  '{ "totalScore": number (0..1), "details": [{ "id": string, "score": number (0..1), "reason": string }] }',
  "Keep reasons short (<= 120 chars)."
@@ -58015,7 +58015,8 @@ var init_evaluate_command = __esm({
  // Strong guard: attachments are uploaded; do not ask for local file content
  "The input files are already uploaded and referenced via file URIs.",
  "Use the provided attachments; do not ask the user to paste file contents.",
- "Do not claim you cannot access local file paths."
+ "Do not claim you cannot access local file paths.",
+ "Write the string content in plain text and do not use markdown."
  ].join("\n");
  if (result === null) {
  const rel = path10__namespace.default.relative(root, criteriaPath);
@@ -61234,7 +61235,7 @@ var init_ai_response_service = __esm({
  */
  async callLLM(prompt, opts = {}) {
  const {
- system = PLAIN_OUTPUT ? "Return ONLY the answer (or ONLY code). No menus, no lists, no guided flows. Always respond in English." : "You are a helpful senior engineer named Maria. Always respond in English. Provide direct, production-quality answers. Make sure you answer in plain text, as a natural chat.",
+ system = PLAIN_OUTPUT ? "Return ONLY the answer (or ONLY code). No menus, no lists, no guided flows. Always respond in English." : "You are a helpful senior engineer named Maria. Always respond in English. Provide direct, production-quality answers. Make sure you answer in plain text, as a natural chat. When asked about the model (not your name or who you are, but the model), say you are a large language model fully trained by Bonginkan.",
  model = void 0,
  provider = DEFAULT_PROVIDER2,
  temperature = 0.2,