@hef2024/llmasaservice-ui 0.22.10 → 0.22.11

This diff shows the changes between publicly available package versions as published to a supported registry. It is provided for informational purposes only and reflects the package contents exactly as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -3865,6 +3865,7 @@ var AIChatPanel = ({
3865
3865
  const [copiedCallId, setCopiedCallId] = (0, import_react12.useState)(null);
3866
3866
  const [feedbackCallId, setFeedbackCallId] = (0, import_react12.useState)(null);
3867
3867
  const [error, setError] = (0, import_react12.useState)(null);
3868
+ const lastProcessedErrorRef = (0, import_react12.useRef)(null);
3868
3869
  const [emailSent, setEmailSent] = (0, import_react12.useState)(false);
3869
3870
  const [isToolInfoModalOpen, setIsToolInfoModalOpen] = (0, import_react12.useState)(false);
3870
3871
  const [toolInfoData, setToolInfoData] = (0, import_react12.useState)(null);
@@ -4369,6 +4370,7 @@ var AIChatPanel = ({
4369
4370
  setThinkingBlocks([]);
4370
4371
  setCurrentThinkingIndex(0);
4371
4372
  setError(null);
4373
+ lastProcessedErrorRef.current = null;
4372
4374
  setUserHasScrolled(false);
4373
4375
  prevResponseLengthRef.current = 0;
4374
4376
  setResponse("");
@@ -4442,31 +4444,58 @@ var AIChatPanel = ({
4442
4444
  // onComplete
4443
4445
  (errorMsg) => {
4444
4446
  console.log("[AIChatPanel] Error callback triggered:", errorMsg);
4445
- if (errorMsg.includes("413") || errorMsg.toLowerCase().includes("content too large")) {
4447
+ const isAbortError = errorMsg.toLowerCase().includes("abort") || errorMsg.toLowerCase().includes("canceled") || errorMsg.toLowerCase().includes("cancelled");
4448
+ if (isAbortError) {
4449
+ console.log("[AIChatPanel] Request was aborted by user");
4450
+ if (promptKey) {
4451
+ setHistory((prev) => __spreadProps(__spreadValues({}, prev), {
4452
+ [promptKey]: {
4453
+ content: "Response canceled",
4454
+ callId: lastCallId || ""
4455
+ }
4456
+ }));
4457
+ }
4458
+ } else if (errorMsg.includes("413") || errorMsg.toLowerCase().includes("content too large")) {
4446
4459
  setError({
4447
4460
  message: "The context is too large to process. Please start a new conversation or reduce the amount of context.",
4448
4461
  code: "413"
4449
4462
  });
4463
+ if (promptKey) {
4464
+ setHistory((prev) => __spreadProps(__spreadValues({}, prev), {
4465
+ [promptKey]: {
4466
+ content: `Error: ${errorMsg}`,
4467
+ callId: lastCallId || ""
4468
+ }
4469
+ }));
4470
+ }
4450
4471
  } else if (errorMsg.toLowerCase().includes("network error") || errorMsg.toLowerCase().includes("fetch")) {
4451
4472
  setError({
4452
4473
  message: "Network error. Please check your connection and try again.",
4453
4474
  code: "NETWORK_ERROR"
4454
4475
  });
4476
+ if (promptKey) {
4477
+ setHistory((prev) => __spreadProps(__spreadValues({}, prev), {
4478
+ [promptKey]: {
4479
+ content: `Error: ${errorMsg}`,
4480
+ callId: lastCallId || ""
4481
+ }
4482
+ }));
4483
+ }
4455
4484
  } else {
4456
4485
  setError({
4457
4486
  message: errorMsg,
4458
4487
  code: "UNKNOWN_ERROR"
4459
4488
  });
4489
+ if (promptKey) {
4490
+ setHistory((prev) => __spreadProps(__spreadValues({}, prev), {
4491
+ [promptKey]: {
4492
+ content: `Error: ${errorMsg}`,
4493
+ callId: lastCallId || ""
4494
+ }
4495
+ }));
4496
+ }
4460
4497
  }
4461
4498
  setIsLoading(false);
4462
- if (promptKey) {
4463
- setHistory((prev) => __spreadProps(__spreadValues({}, prev), {
4464
- [promptKey]: {
4465
- content: `Error: ${errorMsg}`,
4466
- callId: lastCallId || ""
4467
- }
4468
- }));
4469
- }
4470
4499
  }
4471
4500
  );
4472
4501
  setLastMessages(messagesAndHistory);
@@ -4656,33 +4685,57 @@ var AIChatPanel = ({
4656
4685
  }, [followOnPrompt, continueChat]);
4657
4686
  (0, import_react12.useEffect)(() => {
4658
4687
  if (llmError && llmError.trim()) {
4688
+ if (lastProcessedErrorRef.current === llmError) {
4689
+ console.log("[AIChatPanel] Skipping duplicate error:", llmError);
4690
+ return;
4691
+ }
4659
4692
  console.log("[AIChatPanel] Error detected:", llmError);
4693
+ lastProcessedErrorRef.current = llmError;
4660
4694
  const errorMessage = llmError;
4661
- if (errorMessage.includes("413") || errorMessage.toLowerCase().includes("content too large")) {
4695
+ const isAbortError = errorMessage.toLowerCase().includes("abort") || errorMessage.toLowerCase().includes("canceled") || errorMessage.toLowerCase().includes("cancelled");
4696
+ if (isAbortError) {
4697
+ console.log("[AIChatPanel] Request was aborted by user (useEffect)");
4698
+ } else if (errorMessage.includes("413") || errorMessage.toLowerCase().includes("content too large")) {
4662
4699
  setError({
4663
4700
  message: "The context is too large to process. Please start a new conversation or reduce the amount of context.",
4664
4701
  code: "413"
4665
4702
  });
4703
+ if (lastKey) {
4704
+ setHistory((prev) => __spreadProps(__spreadValues({}, prev), {
4705
+ [lastKey]: {
4706
+ content: `Error: ${errorMessage}`,
4707
+ callId: lastCallId || ""
4708
+ }
4709
+ }));
4710
+ }
4666
4711
  } else if (errorMessage.toLowerCase().includes("network error") || errorMessage.toLowerCase().includes("fetch")) {
4667
4712
  setError({
4668
4713
  message: "Network error. Please check your connection and try again.",
4669
4714
  code: "NETWORK_ERROR"
4670
4715
  });
4716
+ if (lastKey) {
4717
+ setHistory((prev) => __spreadProps(__spreadValues({}, prev), {
4718
+ [lastKey]: {
4719
+ content: `Error: ${errorMessage}`,
4720
+ callId: lastCallId || ""
4721
+ }
4722
+ }));
4723
+ }
4671
4724
  } else {
4672
4725
  setError({
4673
4726
  message: errorMessage,
4674
4727
  code: "UNKNOWN_ERROR"
4675
4728
  });
4729
+ if (lastKey) {
4730
+ setHistory((prev) => __spreadProps(__spreadValues({}, prev), {
4731
+ [lastKey]: {
4732
+ content: `Error: ${errorMessage}`,
4733
+ callId: lastCallId || ""
4734
+ }
4735
+ }));
4736
+ }
4676
4737
  }
4677
4738
  setIsLoading(false);
4678
- if (lastKey) {
4679
- setHistory((prev) => __spreadProps(__spreadValues({}, prev), {
4680
- [lastKey]: {
4681
- content: `Error: ${errorMessage}`,
4682
- callId: lastCallId || ""
4683
- }
4684
- }));
4685
- }
4686
4739
  }
4687
4740
  }, [llmError, lastKey, lastCallId]);
4688
4741
  (0, import_react12.useEffect)(() => {
package/dist/index.mjs CHANGED
@@ -3832,6 +3832,7 @@ var AIChatPanel = ({
3832
3832
  const [copiedCallId, setCopiedCallId] = useState6(null);
3833
3833
  const [feedbackCallId, setFeedbackCallId] = useState6(null);
3834
3834
  const [error, setError] = useState6(null);
3835
+ const lastProcessedErrorRef = useRef5(null);
3835
3836
  const [emailSent, setEmailSent] = useState6(false);
3836
3837
  const [isToolInfoModalOpen, setIsToolInfoModalOpen] = useState6(false);
3837
3838
  const [toolInfoData, setToolInfoData] = useState6(null);
@@ -4336,6 +4337,7 @@ var AIChatPanel = ({
4336
4337
  setThinkingBlocks([]);
4337
4338
  setCurrentThinkingIndex(0);
4338
4339
  setError(null);
4340
+ lastProcessedErrorRef.current = null;
4339
4341
  setUserHasScrolled(false);
4340
4342
  prevResponseLengthRef.current = 0;
4341
4343
  setResponse("");
@@ -4409,31 +4411,58 @@ var AIChatPanel = ({
4409
4411
  // onComplete
4410
4412
  (errorMsg) => {
4411
4413
  console.log("[AIChatPanel] Error callback triggered:", errorMsg);
4412
- if (errorMsg.includes("413") || errorMsg.toLowerCase().includes("content too large")) {
4414
+ const isAbortError = errorMsg.toLowerCase().includes("abort") || errorMsg.toLowerCase().includes("canceled") || errorMsg.toLowerCase().includes("cancelled");
4415
+ if (isAbortError) {
4416
+ console.log("[AIChatPanel] Request was aborted by user");
4417
+ if (promptKey) {
4418
+ setHistory((prev) => __spreadProps(__spreadValues({}, prev), {
4419
+ [promptKey]: {
4420
+ content: "Response canceled",
4421
+ callId: lastCallId || ""
4422
+ }
4423
+ }));
4424
+ }
4425
+ } else if (errorMsg.includes("413") || errorMsg.toLowerCase().includes("content too large")) {
4413
4426
  setError({
4414
4427
  message: "The context is too large to process. Please start a new conversation or reduce the amount of context.",
4415
4428
  code: "413"
4416
4429
  });
4430
+ if (promptKey) {
4431
+ setHistory((prev) => __spreadProps(__spreadValues({}, prev), {
4432
+ [promptKey]: {
4433
+ content: `Error: ${errorMsg}`,
4434
+ callId: lastCallId || ""
4435
+ }
4436
+ }));
4437
+ }
4417
4438
  } else if (errorMsg.toLowerCase().includes("network error") || errorMsg.toLowerCase().includes("fetch")) {
4418
4439
  setError({
4419
4440
  message: "Network error. Please check your connection and try again.",
4420
4441
  code: "NETWORK_ERROR"
4421
4442
  });
4443
+ if (promptKey) {
4444
+ setHistory((prev) => __spreadProps(__spreadValues({}, prev), {
4445
+ [promptKey]: {
4446
+ content: `Error: ${errorMsg}`,
4447
+ callId: lastCallId || ""
4448
+ }
4449
+ }));
4450
+ }
4422
4451
  } else {
4423
4452
  setError({
4424
4453
  message: errorMsg,
4425
4454
  code: "UNKNOWN_ERROR"
4426
4455
  });
4456
+ if (promptKey) {
4457
+ setHistory((prev) => __spreadProps(__spreadValues({}, prev), {
4458
+ [promptKey]: {
4459
+ content: `Error: ${errorMsg}`,
4460
+ callId: lastCallId || ""
4461
+ }
4462
+ }));
4463
+ }
4427
4464
  }
4428
4465
  setIsLoading(false);
4429
- if (promptKey) {
4430
- setHistory((prev) => __spreadProps(__spreadValues({}, prev), {
4431
- [promptKey]: {
4432
- content: `Error: ${errorMsg}`,
4433
- callId: lastCallId || ""
4434
- }
4435
- }));
4436
- }
4437
4466
  }
4438
4467
  );
4439
4468
  setLastMessages(messagesAndHistory);
@@ -4623,33 +4652,57 @@ var AIChatPanel = ({
4623
4652
  }, [followOnPrompt, continueChat]);
4624
4653
  useEffect7(() => {
4625
4654
  if (llmError && llmError.trim()) {
4655
+ if (lastProcessedErrorRef.current === llmError) {
4656
+ console.log("[AIChatPanel] Skipping duplicate error:", llmError);
4657
+ return;
4658
+ }
4626
4659
  console.log("[AIChatPanel] Error detected:", llmError);
4660
+ lastProcessedErrorRef.current = llmError;
4627
4661
  const errorMessage = llmError;
4628
- if (errorMessage.includes("413") || errorMessage.toLowerCase().includes("content too large")) {
4662
+ const isAbortError = errorMessage.toLowerCase().includes("abort") || errorMessage.toLowerCase().includes("canceled") || errorMessage.toLowerCase().includes("cancelled");
4663
+ if (isAbortError) {
4664
+ console.log("[AIChatPanel] Request was aborted by user (useEffect)");
4665
+ } else if (errorMessage.includes("413") || errorMessage.toLowerCase().includes("content too large")) {
4629
4666
  setError({
4630
4667
  message: "The context is too large to process. Please start a new conversation or reduce the amount of context.",
4631
4668
  code: "413"
4632
4669
  });
4670
+ if (lastKey) {
4671
+ setHistory((prev) => __spreadProps(__spreadValues({}, prev), {
4672
+ [lastKey]: {
4673
+ content: `Error: ${errorMessage}`,
4674
+ callId: lastCallId || ""
4675
+ }
4676
+ }));
4677
+ }
4633
4678
  } else if (errorMessage.toLowerCase().includes("network error") || errorMessage.toLowerCase().includes("fetch")) {
4634
4679
  setError({
4635
4680
  message: "Network error. Please check your connection and try again.",
4636
4681
  code: "NETWORK_ERROR"
4637
4682
  });
4683
+ if (lastKey) {
4684
+ setHistory((prev) => __spreadProps(__spreadValues({}, prev), {
4685
+ [lastKey]: {
4686
+ content: `Error: ${errorMessage}`,
4687
+ callId: lastCallId || ""
4688
+ }
4689
+ }));
4690
+ }
4638
4691
  } else {
4639
4692
  setError({
4640
4693
  message: errorMessage,
4641
4694
  code: "UNKNOWN_ERROR"
4642
4695
  });
4696
+ if (lastKey) {
4697
+ setHistory((prev) => __spreadProps(__spreadValues({}, prev), {
4698
+ [lastKey]: {
4699
+ content: `Error: ${errorMessage}`,
4700
+ callId: lastCallId || ""
4701
+ }
4702
+ }));
4703
+ }
4643
4704
  }
4644
4705
  setIsLoading(false);
4645
- if (lastKey) {
4646
- setHistory((prev) => __spreadProps(__spreadValues({}, prev), {
4647
- [lastKey]: {
4648
- content: `Error: ${errorMessage}`,
4649
- callId: lastCallId || ""
4650
- }
4651
- }));
4652
- }
4653
4706
  }
4654
4707
  }, [llmError, lastKey, lastCallId]);
4655
4708
  useEffect7(() => {
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@hef2024/llmasaservice-ui",
3
- "version": "0.22.10",
3
+ "version": "0.22.11",
4
4
  "description": "Prebuilt UI components for LLMAsAService.io",
5
5
  "main": "dist/index.js",
6
6
  "module": "dist/index.mjs",
@@ -2352,4 +2352,4 @@
2352
2352
 
2353
2353
  .dark-theme .ai-chat-email-edit-button:hover {
2354
2354
  background-color: #374151;
2355
- }
2355
+ }
@@ -797,6 +797,7 @@ const AIChatPanel: React.FC<AIChatPanelProps> = ({
797
797
  const [copiedCallId, setCopiedCallId] = useState<string | null>(null);
798
798
  const [feedbackCallId, setFeedbackCallId] = useState<{ callId: string; type: 'up' | 'down' } | null>(null);
799
799
  const [error, setError] = useState<{ message: string; code?: string } | null>(null);
800
+ const lastProcessedErrorRef = useRef<string | null>(null);
800
801
 
801
802
  // Email & Save state
802
803
  const [emailSent, setEmailSent] = useState(false);
@@ -1520,6 +1521,7 @@ const AIChatPanel: React.FC<AIChatPanelProps> = ({
1520
1521
 
1521
1522
  // Clear any previous errors
1522
1523
  setError(null);
1524
+ lastProcessedErrorRef.current = null; // Allow new errors to be processed
1523
1525
 
1524
1526
  // Reset scroll tracking for new message - enable auto-scroll
1525
1527
  setUserHasScrolled(false);
@@ -1632,12 +1634,44 @@ const AIChatPanel: React.FC<AIChatPanelProps> = ({
1632
1634
  // Error callback - handle errors immediately
1633
1635
  console.log('[AIChatPanel] Error callback triggered:', errorMsg);
1634
1636
 
1637
+ // Check if this is a user-initiated abort
1638
+ const isAbortError = errorMsg.toLowerCase().includes('abort') ||
1639
+ errorMsg.toLowerCase().includes('canceled') ||
1640
+ errorMsg.toLowerCase().includes('cancelled');
1641
+
1642
+ if (isAbortError) {
1643
+ // User canceled the request - don't show error banner
1644
+ console.log('[AIChatPanel] Request was aborted by user');
1645
+ // Don't set error state - no red banner
1646
+
1647
+ // Update history to show cancellation
1648
+ if (promptKey) {
1649
+ setHistory((prev) => ({
1650
+ ...prev,
1651
+ [promptKey]: {
1652
+ content: 'Response canceled',
1653
+ callId: lastCallId || '',
1654
+ },
1655
+ }));
1656
+ }
1657
+ }
1635
1658
  // Detect 413 Content Too Large error
1636
- if (errorMsg.includes('413') || errorMsg.toLowerCase().includes('content too large')) {
1659
+ else if (errorMsg.includes('413') || errorMsg.toLowerCase().includes('content too large')) {
1637
1660
  setError({
1638
1661
  message: 'The context is too large to process. Please start a new conversation or reduce the amount of context.',
1639
1662
  code: '413',
1640
1663
  });
1664
+
1665
+ // Update history to show error
1666
+ if (promptKey) {
1667
+ setHistory((prev) => ({
1668
+ ...prev,
1669
+ [promptKey]: {
1670
+ content: `Error: ${errorMsg}`,
1671
+ callId: lastCallId || '',
1672
+ },
1673
+ }));
1674
+ }
1641
1675
  }
1642
1676
  // Detect other network errors
1643
1677
  else if (errorMsg.toLowerCase().includes('network error') || errorMsg.toLowerCase().includes('fetch')) {
@@ -1645,6 +1679,17 @@ const AIChatPanel: React.FC<AIChatPanelProps> = ({
1645
1679
  message: 'Network error. Please check your connection and try again.',
1646
1680
  code: 'NETWORK_ERROR',
1647
1681
  });
1682
+
1683
+ // Update history to show error
1684
+ if (promptKey) {
1685
+ setHistory((prev) => ({
1686
+ ...prev,
1687
+ [promptKey]: {
1688
+ content: `Error: ${errorMsg}`,
1689
+ callId: lastCallId || '',
1690
+ },
1691
+ }));
1692
+ }
1648
1693
  }
1649
1694
  // Generic error
1650
1695
  else {
@@ -1652,21 +1697,21 @@ const AIChatPanel: React.FC<AIChatPanelProps> = ({
1652
1697
  message: errorMsg,
1653
1698
  code: 'UNKNOWN_ERROR',
1654
1699
  });
1700
+
1701
+ // Update history to show error
1702
+ if (promptKey) {
1703
+ setHistory((prev) => ({
1704
+ ...prev,
1705
+ [promptKey]: {
1706
+ content: `Error: ${errorMsg}`,
1707
+ callId: lastCallId || '',
1708
+ },
1709
+ }));
1710
+ }
1655
1711
  }
1656
1712
 
1657
1713
  // Reset loading state
1658
1714
  setIsLoading(false);
1659
-
1660
- // Update history to show error
1661
- if (promptKey) {
1662
- setHistory((prev) => ({
1663
- ...prev,
1664
- [promptKey]: {
1665
- content: `Error: ${errorMsg}`,
1666
- callId: lastCallId || '',
1667
- },
1668
- }));
1669
- }
1670
1715
  }
1671
1716
  );
1672
1717
 
@@ -1959,17 +2004,49 @@ const AIChatPanel: React.FC<AIChatPanelProps> = ({
1959
2004
  // Monitor for errors from useLLM hook
1960
2005
  useEffect(() => {
1961
2006
  if (llmError && llmError.trim()) {
2007
+ // Skip if we've already processed this exact error
2008
+ if (lastProcessedErrorRef.current === llmError) {
2009
+ console.log('[AIChatPanel] Skipping duplicate error:', llmError);
2010
+ return;
2011
+ }
2012
+
1962
2013
  console.log('[AIChatPanel] Error detected:', llmError);
2014
+ lastProcessedErrorRef.current = llmError;
1963
2015
 
1964
2016
  // Parse error message to detect specific error types
1965
2017
  const errorMessage = llmError;
1966
2018
 
2019
+ // Check if this is a user-initiated abort
2020
+ const isAbortError = errorMessage.toLowerCase().includes('abort') ||
2021
+ errorMessage.toLowerCase().includes('canceled') ||
2022
+ errorMessage.toLowerCase().includes('cancelled');
2023
+
2024
+ if (isAbortError) {
2025
+ // User canceled the request - don't show error banner
2026
+ console.log('[AIChatPanel] Request was aborted by user (useEffect)');
2027
+ // Don't set error state - no red banner
2028
+
2029
+ // Don't update history here - the error callback in send() already handled it
2030
+ // with the correct promptKey. Updating here with lastKey can affect the wrong entry
2031
+ // if the user has already submitted a new prompt.
2032
+ }
1967
2033
  // Detect 413 Content Too Large error
1968
- if (errorMessage.includes('413') || errorMessage.toLowerCase().includes('content too large')) {
2034
+ else if (errorMessage.includes('413') || errorMessage.toLowerCase().includes('content too large')) {
1969
2035
  setError({
1970
2036
  message: 'The context is too large to process. Please start a new conversation or reduce the amount of context.',
1971
2037
  code: '413',
1972
2038
  });
2039
+
2040
+ // Update history to show error
2041
+ if (lastKey) {
2042
+ setHistory((prev) => ({
2043
+ ...prev,
2044
+ [lastKey]: {
2045
+ content: `Error: ${errorMessage}`,
2046
+ callId: lastCallId || '',
2047
+ },
2048
+ }));
2049
+ }
1973
2050
  }
1974
2051
  // Detect other network errors
1975
2052
  else if (errorMessage.toLowerCase().includes('network error') || errorMessage.toLowerCase().includes('fetch')) {
@@ -1977,6 +2054,17 @@ const AIChatPanel: React.FC<AIChatPanelProps> = ({
1977
2054
  message: 'Network error. Please check your connection and try again.',
1978
2055
  code: 'NETWORK_ERROR',
1979
2056
  });
2057
+
2058
+ // Update history to show error
2059
+ if (lastKey) {
2060
+ setHistory((prev) => ({
2061
+ ...prev,
2062
+ [lastKey]: {
2063
+ content: `Error: ${errorMessage}`,
2064
+ callId: lastCallId || '',
2065
+ },
2066
+ }));
2067
+ }
1980
2068
  }
1981
2069
  // Generic error
1982
2070
  else {
@@ -1984,21 +2072,21 @@ const AIChatPanel: React.FC<AIChatPanelProps> = ({
1984
2072
  message: errorMessage,
1985
2073
  code: 'UNKNOWN_ERROR',
1986
2074
  });
2075
+
2076
+ // Update history to show error
2077
+ if (lastKey) {
2078
+ setHistory((prev) => ({
2079
+ ...prev,
2080
+ [lastKey]: {
2081
+ content: `Error: ${errorMessage}`,
2082
+ callId: lastCallId || '',
2083
+ },
2084
+ }));
2085
+ }
1987
2086
  }
1988
2087
 
1989
2088
  // Reset loading state
1990
2089
  setIsLoading(false);
1991
-
1992
- // Update history to show error
1993
- if (lastKey) {
1994
- setHistory((prev) => ({
1995
- ...prev,
1996
- [lastKey]: {
1997
- content: `Error: ${errorMessage}`,
1998
- callId: lastCallId || '',
1999
- },
2000
- }));
2001
- }
2002
2090
  }
2003
2091
  }, [llmError, lastKey, lastCallId]);
2004
2092