lemma-sdk 0.2.19 → 0.2.21

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (40)
  1. package/README.md +71 -33
  2. package/dist/auth.d.ts +42 -1
  3. package/dist/auth.js +43 -0
  4. package/dist/browser/lemma-client.js +49 -1
  5. package/dist/browser.d.ts +1 -1
  6. package/dist/browser.js +1 -1
  7. package/dist/index.d.ts +2 -2
  8. package/dist/index.js +1 -1
  9. package/dist/namespaces/assistants.d.ts +2 -0
  10. package/dist/namespaces/assistants.js +3 -0
  11. package/dist/openapi_client/index.d.ts +3 -0
  12. package/dist/openapi_client/index.js +1 -0
  13. package/dist/openapi_client/models/AvailableModelInfo.d.ts +8 -0
  14. package/dist/openapi_client/models/AvailableModelInfo.js +1 -0
  15. package/dist/openapi_client/models/AvailableModels.d.ts +3 -4
  16. package/dist/openapi_client/models/AvailableModels.js +2 -3
  17. package/dist/openapi_client/models/AvailableModelsListResponse.d.ts +7 -0
  18. package/dist/openapi_client/models/AvailableModelsListResponse.js +1 -0
  19. package/dist/openapi_client/models/BulkCreateRecordsRequest.d.ts +4 -0
  20. package/dist/openapi_client/models/CreateFunctionRequest.d.ts +2 -0
  21. package/dist/openapi_client/models/FunctionResponse.d.ts +2 -0
  22. package/dist/openapi_client/models/FunctionRunResponse.d.ts +4 -0
  23. package/dist/openapi_client/models/FunctionType.d.ts +7 -0
  24. package/dist/openapi_client/models/FunctionType.js +12 -0
  25. package/dist/openapi_client/models/UpdateFunctionRequest.d.ts +2 -0
  26. package/dist/openapi_client/services/ConversationsService.d.ts +8 -0
  27. package/dist/openapi_client/services/ConversationsService.js +12 -0
  28. package/dist/react/components/AssistantChrome.js +13 -23
  29. package/dist/react/components/AssistantExperience.d.ts +12 -4
  30. package/dist/react/components/AssistantExperience.js +40 -40
  31. package/dist/react/components/assistant-types.d.ts +7 -0
  32. package/dist/react/index.d.ts +3 -3
  33. package/dist/react/index.js +1 -1
  34. package/dist/react/styles.css +1246 -5
  35. package/dist/react/useAssistantController.d.ts +2 -1
  36. package/dist/react/useAssistantController.js +34 -1
  37. package/dist/react/useAssistantRuntime.js +25 -4
  38. package/dist/react/useAssistantSession.js +14 -5
  39. package/dist/types.d.ts +1 -1
  40. package/package.json +1 -1
@@ -1,5 +1,5 @@
1
1
  import type { LemmaClient } from "../client.js";
2
- import type { Conversation, ConversationModel } from "../types.js";
2
+ import type { AvailableModelInfo, Conversation, ConversationModel } from "../types.js";
3
3
  export interface AssistantConversationScope {
4
4
  podId?: string | null;
5
5
  assistantId?: string | null;
@@ -54,6 +54,7 @@ export interface UseAssistantControllerResult {
54
54
  messages: AssistantRenderableMessage[];
55
55
  conversations: Conversation[];
56
56
  activeConversationId: string | null;
57
+ availableModels: AvailableModelInfo[];
57
58
  conversationModel: ConversationModel | null;
58
59
  isActiveConversationRunning: boolean;
59
60
  isLoading: boolean;
@@ -543,6 +543,7 @@ export function useAssistantController({ client, podId, assistantId, organizatio
543
543
  const [messages, setMessages] = useState([]);
544
544
  const [conversations, setConversations] = useState([]);
545
545
  const [activeConversationId, setActiveConversationId] = useState(null);
546
+ const [availableModels, setAvailableModels] = useState([]);
546
547
  const [conversationModel, setConversationModelState] = useState(null);
547
548
  const [isStreaming, setIsStreaming] = useState(false);
548
549
  const [isLoadingConversations, setIsLoadingConversations] = useState(false);
@@ -649,6 +650,15 @@ export function useAssistantController({ client, podId, assistantId, organizatio
649
650
  setIsLoadingConversations(false);
650
651
  }
651
652
  }, [scope, sessionListConversations]);
653
+ const loadAvailableModels = useCallback(async () => {
654
+ try {
655
+ const response = await client.conversations.listModels();
656
+ return response.items ?? [];
657
+ }
658
+ catch {
659
+ return [];
660
+ }
661
+ }, [client]);
652
662
  const loadConversationMessages = useCallback(async (conversationId) => {
653
663
  setIsLoadingMessages(true);
654
664
  try {
@@ -706,6 +716,23 @@ export function useAssistantController({ client, podId, assistantId, organizatio
706
716
  useEffect(() => {
707
717
  conversationsRef.current = conversations;
708
718
  }, [conversations]);
719
+ useEffect(() => {
720
+ if (!enabled) {
721
+ setAvailableModels([]);
722
+ return;
723
+ }
724
+ let cancelled = false;
725
+ void loadAvailableModels()
726
+ .then((models) => {
727
+ if (cancelled)
728
+ return;
729
+ setAvailableModels(models);
730
+ })
731
+ .catch(() => undefined);
732
+ return () => {
733
+ cancelled = true;
734
+ };
735
+ }, [enabled, loadAvailableModels]);
709
736
  useEffect(() => {
710
737
  const conversationId = activeConversationIdRef.current;
711
738
  if (!conversationId) {
@@ -759,6 +786,7 @@ export function useAssistantController({ client, podId, assistantId, organizatio
759
786
  loadingConversationIdRef.current = null;
760
787
  skipInitialLoadConversationIdsRef.current.clear();
761
788
  setActiveConversationId(null);
789
+ setAvailableModels([]);
762
790
  setConversationModelState(null);
763
791
  setConversations([]);
764
792
  setMessages([]);
@@ -842,8 +870,12 @@ export function useAssistantController({ client, podId, assistantId, organizatio
842
870
  const conversationIsRunning = isConversationRunning(activeConversation?.status);
843
871
  if (!hadActiveStream && !conversationIsRunning)
844
872
  return;
873
+ const previousStatus = activeConversation?.status;
845
874
  touchConversation(conversationId, { status: "waiting" });
846
- void sessionStop(conversationId).catch(() => undefined);
875
+ void sessionStop(conversationId).catch((error) => {
876
+ touchConversation(conversationId, { status: previousStatus });
877
+ setLocalError((prev) => prev || (error instanceof Error ? error.message : "Failed to stop conversation"));
878
+ });
847
879
  }, [isStreaming, sessionCancel, sessionIsStreaming, sessionStop, touchConversation]);
848
880
  const selectConversation = useCallback((conversationId) => {
849
881
  if (sessionIsStreaming || isStreaming) {
@@ -1066,6 +1098,7 @@ export function useAssistantController({ client, podId, assistantId, organizatio
1066
1098
  messages,
1067
1099
  conversations,
1068
1100
  activeConversationId,
1101
+ availableModels,
1069
1102
  conversationModel,
1070
1103
  isActiveConversationRunning,
1071
1104
  isLoading,
@@ -21,6 +21,7 @@ function messageTime(message) {
21
21
  function isOptimisticId(messageId) {
22
22
  return messageId.startsWith("optimistic-user-");
23
23
  }
24
+ const OPTIMISTIC_MATCH_WINDOW_MS = 2 * 60 * 1000;
24
25
  function upsertRuntimeMessage(previous, incoming) {
25
26
  const next = [...previous];
26
27
  const directIndex = next.findIndex((message) => message.id === incoming.id);
@@ -31,9 +32,22 @@ function upsertRuntimeMessage(previous, incoming) {
31
32
  if (incoming.role === "user") {
32
33
  const incomingText = messageText(incoming.content);
33
34
  if (incomingText) {
34
- const optimisticIndex = next.findIndex((message) => (message.role === "user"
35
- && isOptimisticId(message.id)
36
- && messageText(message.content) === incomingText));
35
+ const incomingTimestamp = messageTime(incoming);
36
+ let optimisticIndex = -1;
37
+ let bestDistance = Number.POSITIVE_INFINITY;
38
+ next.forEach((message, index) => {
39
+ if (message.role !== "user"
40
+ || !isOptimisticId(message.id)
41
+ || messageText(message.content) !== incomingText) {
42
+ return;
43
+ }
44
+ const distance = Math.abs(messageTime(message) - incomingTimestamp);
45
+ if (distance > OPTIMISTIC_MATCH_WINDOW_MS || distance >= bestDistance) {
46
+ return;
47
+ }
48
+ optimisticIndex = index;
49
+ bestDistance = distance;
50
+ });
37
51
  if (optimisticIndex >= 0) {
38
52
  next[optimisticIndex] = incoming;
39
53
  return next;
@@ -71,7 +85,14 @@ export function useAssistantRuntime({ conversationId = null, sessionMessages = [
71
85
  const normalized = messages
72
86
  .map((message) => toRuntimeMessage(message, conversationId))
73
87
  .filter((message) => !conversationId || message.conversation_id === conversationId);
74
- setRuntimeMessages([...normalized].sort((a, b) => messageTime(a) - messageTime(b)));
88
+ setRuntimeMessages((previous) => {
89
+ const scopedPrevious = previous.filter((message) => !conversationId || message.conversation_id === conversationId);
90
+ // Loads can complete after optimistic appends or stream events. Merge the
91
+ // loaded snapshot into the current runtime state so newer local messages
92
+ // are not temporarily dropped while the server catches up.
93
+ const merged = normalized.reduce((accumulator, message) => upsertRuntimeMessage(accumulator, message), scopedPrevious);
94
+ return [...merged].sort((a, b) => messageTime(a) - messageTime(b));
95
+ });
75
96
  }, [conversationId]);
76
97
  const appendOptimisticUserMessage = useCallback((content, options) => {
77
98
  const trimmed = content.trim();
@@ -393,12 +393,21 @@ export function useAssistantSession(options) {
393
393
  return false;
394
394
  }
395
395
  }
396
+ const previousResumeKey = autoResumedKeyRef.current;
396
397
  autoResumedKeyRef.current = resumeKey;
397
- await resume({
398
- conversationId: id,
399
- onlyIfRunning: true,
400
- });
401
- return true;
398
+ try {
399
+ await resume({
400
+ conversationId: id,
401
+ onlyIfRunning: true,
402
+ });
403
+ return true;
404
+ }
405
+ catch (error) {
406
+ if (autoResumedKeyRef.current === resumeKey) {
407
+ autoResumedKeyRef.current = previousResumeKey;
408
+ }
409
+ throw error;
410
+ }
402
411
  }, [conversationId, isStreaming, refreshConversation, resume]);
403
412
  const stop = useCallback(async (explicitConversationId) => {
404
413
  const id = requireConversationId(explicitConversationId ?? conversationId);
package/dist/types.d.ts CHANGED
@@ -40,7 +40,7 @@ export type CreateAssistantInput = CreateAssistantRequest;
40
40
  export type UpdateAssistantInput = UpdateAssistantRequest;
41
41
  export type Conversation = ConversationResponse;
42
42
  export type ConversationMessage = ConversationMessageResponse;
43
- export type ConversationModel = `${AvailableModels}`;
43
+ export type ConversationModel = `${AvailableModels}` | (string & {});
44
44
  export type Task = TaskResponse;
45
45
  export type TaskMessage = TaskMessageResponse;
46
46
  export type FunctionRun = FunctionRunResponse;
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "lemma-sdk",
3
- "version": "0.2.19",
3
+ "version": "0.2.21",
4
4
  "description": "Official TypeScript SDK for Lemma pod-scoped APIs",
5
5
  "type": "module",
6
6
  "main": "dist/index.js",