pentesting 0.70.2 → 0.70.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/dist/main.js +24 -11
  2. package/package.json +3 -3
package/dist/main.js CHANGED
@@ -727,7 +727,7 @@ var INPUT_PROMPT_PATTERNS = [
727
727
 
728
728
  // src/shared/constants/agent.ts
729
729
  var APP_NAME = "Pentest AI";
730
- var APP_VERSION = "0.70.2";
730
+ var APP_VERSION = "0.70.3";
731
731
  var APP_DESCRIPTION = "Autonomous Penetration Testing AI Agent";
732
732
  var LLM_ROLES = {
733
733
  SYSTEM: "system",
@@ -11748,19 +11748,19 @@ var LLMClient = class {
11748
11748
  debugLog("llm", `[${requestId}] Stream request START`, { model: this.model, toolCount: tools?.length, toolNames: tools?.map((t) => t.name), thinking: !!thinking });
11749
11749
  const response = await makeRequest(this.baseUrl, this.apiKey, requestBody, callbacks?.abortSignal);
11750
11750
  const { context } = createStreamContext(callbacks);
11751
- let fullContent = "";
11752
- let fullReasoning = "";
11751
+ const contentChunks = [];
11752
+ const reasoningChunks = [];
11753
11753
  let usage = { input_tokens: 0, output_tokens: 0 };
11754
11754
  const currentBlockRef = { value: null };
11755
11755
  const toolCallsMap = /* @__PURE__ */ new Map();
11756
11756
  let totalChars = 0;
11757
11757
  let wasAborted = false;
11758
11758
  context.onContent = (text) => {
11759
- fullContent += text;
11759
+ contentChunks.push(text);
11760
11760
  totalChars += text.length;
11761
11761
  };
11762
11762
  context.onReasoning = (text) => {
11763
- fullReasoning += text;
11763
+ reasoningChunks.push(text);
11764
11764
  totalChars += text.length;
11765
11765
  };
11766
11766
  context.onUsage = (u) => {
@@ -11771,7 +11771,7 @@ var LLMClient = class {
11771
11771
  context.toolCallsMap = toolCallsMap;
11772
11772
  wasAborted = await readSSEStream(response, requestId, context, callbacks?.abortSignal);
11773
11773
  const toolCalls = resolveToolCalls(toolCallsMap);
11774
- const stripped = stripThinkTags(fullContent, fullReasoning);
11774
+ const stripped = stripThinkTags(contentChunks.join(""), reasoningChunks.join(""));
11775
11775
  return {
11776
11776
  content: stripped.cleanText,
11777
11777
  toolCalls: toolCalls.length > 0 ? toolCalls : void 0,
@@ -12261,7 +12261,7 @@ function handleToolResult(result, call, outputText, progress) {
12261
12261
  }
12262
12262
 
12263
12263
  // src/shared/utils/context-digest/constants.ts
12264
- var PASSTHROUGH_THRESHOLD = 500;
12264
+ var PASSTHROUGH_THRESHOLD = 2e3;
12265
12265
  var PREPROCESS_THRESHOLD = 3e3;
12266
12266
  var MAX_PREPROCESSED_LINES = 800;
12267
12267
  var MAX_DUPLICATE_DISPLAY = 3;
@@ -14888,7 +14888,14 @@ var TUI_DISPLAY_LIMITS = {
14888
14888
  /** Max chars for thinking block first-line summary */
14889
14889
  thinkingSummaryChars: 72,
14890
14890
  /** Delay before exit to allow Ink to cleanup */
14891
- EXIT_DELAY: 100
14891
+ EXIT_DELAY: 100,
14892
+ /**
14893
+ * Maximum number of messages to keep in React state (TUI message list).
14894
+ * WHY: addMessage() uses [...prev, newMsg] spreading — without a cap, long
14895
+ * sessions accumulate thousands of messages and RAM grows without bound.
14896
+ * Oldest messages are pruned first; all content is preserved in the disk archive.
14897
+ */
14898
+ MAX_MESSAGES: 500
14892
14899
  };
14893
14900
 
14894
14901
  // src/platform/tui/hooks/useAgentState.ts
@@ -14912,7 +14919,13 @@ var useAgentState = () => {
14912
14919
  const toolStartedAtRef = useRef(0);
14913
14920
  const addMessage = useCallback((type, content) => {
14914
14921
  const id = generateId();
14915
- setMessages((prev) => [...prev, { id, type, content, timestamp: /* @__PURE__ */ new Date() }]);
14922
+ setMessages((prev) => {
14923
+ const next = [...prev, { id, type, content, timestamp: /* @__PURE__ */ new Date() }];
14924
+ if (next.length > TUI_DISPLAY_LIMITS.MAX_MESSAGES) {
14925
+ return next.slice(next.length - TUI_DISPLAY_LIMITS.MAX_MESSAGES);
14926
+ }
14927
+ return next;
14928
+ });
14916
14929
  }, []);
14917
14930
  const resetCumulativeCounters = useCallback(() => {
14918
14931
  setCurrentTokens(0);
@@ -16868,7 +16881,7 @@ var FRAMES = [
16868
16881
  "\u2727",
16869
16882
  "\u2726"
16870
16883
  ];
16871
- var INTERVAL = 100;
16884
+ var INTERVAL = 150;
16872
16885
  var MusicSpinner = memo8(({ color }) => {
16873
16886
  const [index, setIndex] = useState6(0);
16874
16887
  useEffect7(() => {
@@ -16915,7 +16928,7 @@ import { Box as Box10, Text as Text12 } from "ink";
16915
16928
  import { useState as useState7, useEffect as useEffect8, memo as memo9 } from "react";
16916
16929
  import { Text as Text11 } from "ink";
16917
16930
  import { jsx as jsx11 } from "react/jsx-runtime";
16918
- var FRAME_INTERVAL2 = 60;
16931
+ var FRAME_INTERVAL2 = 120;
16919
16932
  var WAVE_SPEED2 = 0.25;
16920
16933
  var CHAR_PHASE_GAP = 0.55;
16921
16934
  function sinToColor(sin) {
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "pentesting",
3
- "version": "0.70.2",
3
+ "version": "0.70.3",
4
4
  "description": "Autonomous Penetration Testing AI Agent",
5
5
  "type": "module",
6
6
  "main": "dist/main.js",
@@ -35,9 +35,9 @@
35
35
  "type": "git",
36
36
  "url": "git+https://github.com/agnusdei1207"
37
37
  },
38
- "homepage": "https://agnusdei1207.github.io/brainscience/",
38
+ "homepage": "https://agnusdei1207.github.io/brainscience/pentesting",
39
39
  "bugs": {
40
- "url": "https://agnusdei1207.github.io/brainscience/"
40
+ "url": "https://agnusdei1207.github.io/brainscience/pentesting"
41
41
  },
42
42
  "keywords": [
43
43
  "penetration-testing",