@fugood/bricks-project 2.22.0-beta.11 → 2.22.0-beta.13

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -257,6 +257,10 @@ export const templateActionNameMap = {
  BRICK_WEBVIEW_INJECT_JAVASCRIPT: {
  javascriptCode: 'BRICK_WEBVIEW_JAVASCRIPT_CODE',
  },
+ BRICK_WEBVIEW_QUERY_SELECTOR: {
+ querySelector: 'BRICK_WEBVIEW_QUERY_SELECTOR',
+ expression: 'BRICK_WEBVIEW_EXPRESSION',
+ },
  },
  BRICK_CAMERA: {
  BRICK_CAMERA_TAKE_PICTURE: {
@@ -552,6 +556,8 @@ export const templateActionNameMap = {
  prompt: 'GENERATOR_ONNX_LLM_PROMPT',
  chat: 'GENERATOR_ONNX_LLM_CHAT',
  images: 'GENERATOR_ONNX_LLM_IMAGES',
+ tools: 'GENERATOR_ONNX_LLM_TOOLS',
+ toolChoice: 'GENERATOR_ONNX_LLM_TOOL_CHOICE',
  },
  },
  GENERATOR_ONNX_STT: {
@@ -590,6 +596,15 @@ export const templateActionNameMap = {
  },
  },
  GENERATOR_LLM: {
+ GENERATOR_LLM_TOKENIZE: {
+ mode: 'GENERATOR_LLM_MODE',
+ prompt: 'GENERATOR_LLM_PROMPT',
+ promptImagePaths: 'GENERATOR_LLM_PROMPT_IMAGE_PATHS',
+ messages: 'GENERATOR_LLM_MESSAGES',
+ },
+ GENERATOR_LLM_DETOKENIZE: {
+ tokens: 'GENERATOR_LLM_TOKENS',
+ },
  GENERATOR_LLM_PROCESS_PROMPT: {
  sessionKey: 'GENERATOR_LLM_SESSION_KEY',
  mode: 'GENERATOR_LLM_MODE',
@@ -598,6 +613,7 @@ export const templateActionNameMap = {
  parallelToolCalls: 'GENERATOR_LLM_PARALLEL_TOOL_CALLS',
  toolChoice: 'GENERATOR_LLM_TOOL_CHOICE',
  prompt: 'GENERATOR_LLM_PROMPT',
+ promptImagePaths: 'GENERATOR_LLM_PROMPT_IMAGE_PATHS',
  promptTemplateData: 'GENERATOR_LLM_PROMPT_TEMPLATE_DATA',
  promptTemplateType: 'GENERATOR_LLM_PROMPT_TEMPLATE_TYPE',
  responseFormat: 'GENERATOR_LLM_RESPONSE_FORMAT',
@@ -610,6 +626,7 @@ export const templateActionNameMap = {
  parallelToolCalls: 'GENERATOR_LLM_PARALLEL_TOOL_CALLS',
  toolChoice: 'GENERATOR_LLM_TOOL_CHOICE',
  prompt: 'GENERATOR_LLM_PROMPT',
+ promptImagePaths: 'GENERATOR_LLM_PROMPT_IMAGE_PATHS',
  promptTemplateData: 'GENERATOR_LLM_PROMPT_TEMPLATE_DATA',
  promptTemplateType: 'GENERATOR_LLM_PROMPT_TEMPLATE_TYPE',
  responseFormat: 'GENERATOR_LLM_RESPONSE_FORMAT',
@@ -650,6 +667,7 @@ export const templateActionNameMap = {
  GENERATOR_QNN_LLM_GENERATE: {
  prompt: 'GENERATOR_QNN_LLM_PROMPT',
  messages: 'GENERATOR_QNN_LLM_MESSAGES',
+ tools: 'GENERATOR_QNN_LLM_TOOLS',
  },
  },
  GENERATOR_OPENAI_LLM: {
@@ -676,6 +694,7 @@ export const templateActionNameMap = {
  GENERATOR_ASSISTANT_ADD_MESSAGE: {
  role: 'GENERATOR_ASSISTANT_ROLE',
  content: 'GENERATOR_ASSISTANT_CONTENT',
+ image: 'GENERATOR_ASSISTANT_IMAGE',
  payload: 'GENERATOR_ASSISTANT_PAYLOAD',
  useFileSearch: 'GENERATOR_ASSISTANT_USE_FILE_SEARCH',
  filePath: 'GENERATOR_ASSISTANT_FILE_PATH',
@@ -699,12 +718,14 @@ export const templateActionNameMap = {
  GENERATOR_ASSISTANT_UPDATE_MESSAGE_AT_INDEX: {
  index: 'GENERATOR_ASSISTANT_INDEX',
  content: 'GENERATOR_ASSISTANT_CONTENT',
+ image: 'GENERATOR_ASSISTANT_IMAGE',
  payload: 'GENERATOR_ASSISTANT_PAYLOAD',
  },
  GENERATOR_ASSISTANT_ADD_AUDIO_MESSAGE: {
  role: 'GENERATOR_ASSISTANT_ROLE',
  contentFile: 'GENERATOR_ASSISTANT_CONTENT_FILE',
  contentBase64: 'GENERATOR_ASSISTANT_CONTENT_BASE64',
+ image: 'GENERATOR_ASSISTANT_IMAGE',
  useFileSearch: 'GENERATOR_ASSISTANT_USE_FILE_SEARCH',
  payload: 'GENERATOR_ASSISTANT_PAYLOAD',
  filePath: 'GENERATOR_ASSISTANT_FILE_PATH',
@@ -727,6 +748,7 @@ export const templateActionNameMap = {
  index: 'GENERATOR_ASSISTANT_INDEX',
  contentFile: 'GENERATOR_ASSISTANT_CONTENT_FILE',
  contentBase64: 'GENERATOR_ASSISTANT_CONTENT_BASE64',
+ image: 'GENERATOR_ASSISTANT_IMAGE',
  payload: 'GENERATOR_ASSISTANT_PAYLOAD',
  },
  GENERATOR_ASSISTANT_REMOVE_MESSAGE_AT_INDEX: {
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@fugood/bricks-project",
- "version": "2.22.0-beta.11",
+ "version": "2.22.0-beta.13",
  "main": "index.ts",
  "scripts": {
  "build": "node scripts/build.js"
@@ -14,5 +14,5 @@
  "lodash": "^4.17.4",
  "uuid": "^8.3.1"
  },
- "gitHead": "a6ee3595eab6b78d68c51effdeac7224e666134c"
+ "gitHead": "0039a8fa08e89411caf8608a98ae80d5cca64cbe"
  }
package/types/bricks.ts CHANGED
@@ -2420,6 +2420,23 @@ export type BrickWebViewActionInjectJavascript = ActionWithParams & {
  }>
  }

+ /* Query selector on the WebView */
+ export type BrickWebViewActionQuerySelector = ActionWithParams & {
+ __actionName: 'BRICK_WEBVIEW_QUERY_SELECTOR'
+ params?: Array<
+ | {
+ input: 'querySelector'
+ value?: string | DataLink | EventProperty
+ mapping?: string
+ }
+ | {
+ input: 'expression'
+ value?: string | DataLink | EventProperty
+ mapping?: string
+ }
+ >
+ }
+
  /* Do go forward on the WebView */
  export type BrickWebViewActionGoForward = Action & {
  __actionName: 'BRICK_WEBVIEW_GO_FORWARD'
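The new `BRICK_WEBVIEW_QUERY_SELECTOR` action takes a CSS selector plus an expression string. Below is a minimal sketch of an action value in that shape, assuming literal `value` entries rather than `DataLink`/`EventProperty` references and omitting whatever base fields `ActionWithParams` requires; the selector and expression are illustrative, and the exact evaluation semantics of `expression` are not spelled out in this diff:

```ts
// Sketch only: an object shaped like BrickWebViewActionQuerySelector.
const queryPageTitle = {
  __actionName: 'BRICK_WEBVIEW_QUERY_SELECTOR',
  params: [
    // CSS selector to run inside the WebView document (illustrative)
    { input: 'querySelector', value: 'h1.article-title' },
    // Expression presumably evaluated against the matched element(s) (illustrative)
    { input: 'expression', value: 'el => el.textContent' },
  ],
} as const
```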
@@ -2502,6 +2519,12 @@ Default property:
  /* Event of the webview on message by `window.ReactNativeWebView.postMessage` on you're injected javascript code */
  onMessage?: Array<EventAction>
  }
+ outlets?: {
+ /* The result of the query selector action */
+ queryResult?: () => Data
+ /* The error of the query selector action */
+ queryError?: () => Data
+ }
  animation?: AnimationBasicEvents & {
  onLoad?: Animation
  onError?: Animation
@@ -2523,7 +2546,7 @@ export type BrickWebView = Brick &
  | SwitchCondData
  | {
  __typename: 'SwitchCondInnerStateOutlet'
- outlet: ''
+ outlet: 'queryResult' | 'queryError'
  value: any
  }
  }>
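The `queryResult` / `queryError` outlets added above also become valid `SwitchCondInnerStateOutlet` targets on `BrickWebView`. A hedged sketch of such a condition entry; how `value` is compared against the outlet data is an assumption, not stated in this diff:

```ts
// Sketch only: a switch condition bound to the WebView's new query outlets.
const queryCond = {
  __typename: 'SwitchCondInnerStateOutlet',
  outlet: 'queryResult', // or 'queryError'
  value: 'Hello world',  // illustrative comparison value
} as const
```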
package/types/data.ts CHANGED
@@ -56,6 +56,11 @@ export type Data<T = any> = DataDef & {
  | 'rich-text-content'
  | 'sandbox-script'
  | 'llm-prompt'
+ | 'llm-messages'
+ | 'llm-tools'
+ | 'mcp-server-resources'
+ | 'mcp-server-tools'
+ | 'mcp-server-prompts'
  }
  value: T
  }
@@ -67,6 +72,7 @@ export type DataAssetKind = {
  | 'media-resource-audio'
  | 'media-resource-file'
  | 'lottie-file-uri'
+ | 'rive-file-uri'
  | 'ggml-model-asset'
  | 'gguf-model-asset'
  | 'binary-asset'
@@ -5000,6 +5000,16 @@ export type GeneratorOnnxLLMActionInfer = ActionWithParams & {
  value?: Array<any> | DataLink | EventProperty
  mapping?: string
  }
+ | {
+ input: 'tools'
+ value?: Array<any> | DataLink | EventProperty
+ mapping?: string
+ }
+ | {
+ input: 'toolChoice'
+ value?: string | DataLink | EventProperty
+ mapping?: string
+ }
  >
  }

@@ -5019,6 +5029,8 @@ Default property:
  {
  "model": "BricksDisplay/phi-1_5-q4",
  "modelType": "auto",
+ "toolCallParser": "llama3_json",
+ "toolChoice": "auto",
  "maxNewTokens": 256,
  "temperature": 0.7,
  "topK": 50,
@@ -5125,6 +5137,14 @@ Default property:
  messages?: Array<DataLink | {}> | DataLink
  /* Images with message to inference */
  images?: Array<string | DataLink> | DataLink
+ /* Tool call parser */
+ toolCallParser?: 'llama3_json' | 'mistral' | 'hermes' | 'internlm' | 'phi4' | DataLink
+ /* Tools for chat mode using OpenAI-compatible function calling format
+ Format: Array of objects with {type, function: {name, description, parameters}} structure
+ See: https://platform.openai.com/docs/guides/function-calling */
+ tools?: Array<{} | DataLink> | DataLink
+ /* Tool choice for chat mode */
+ toolChoice?: 'none' | 'auto' | DataLink
  /* Max new tokens to generate */
  maxNewTokens?: number | DataLink
  /* Temperature */
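A hedged sketch of the new generator-level tool-calling properties, with one OpenAI-style function definition in the documented `{type, function: {name, description, parameters}}` shape; the `get_weather` tool itself is purely illustrative:

```ts
// Sketch only: ONNX LLM generator properties for tool calling.
const onnxLlmToolProps = {
  toolCallParser: 'llama3_json', // or 'mistral' | 'hermes' | 'internlm' | 'phi4'
  toolChoice: 'auto',            // or 'none'
  tools: [
    {
      type: 'function',
      function: {
        name: 'get_weather',     // hypothetical tool
        description: 'Get the current weather for a city',
        parameters: {
          type: 'object',
          properties: { city: { type: 'string' } },
          required: ['city'],
        },
      },
    },
  ],
} as const
```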
@@ -5159,6 +5179,10 @@ Default property:
  events?: {
  /* Event triggered when state change */
  onContextStateChange?: Array<EventAction>
+ /* Event triggered on get function call request */
+ onFunctionCall?: Array<EventAction>
+ /* Event triggered on completion finished */
+ onCompletionFinished?: Array<EventAction>
  /* Event triggered when error occurs */
  onError?: Array<EventAction>
  }
@@ -5169,6 +5193,8 @@ Default property:
  generated?: () => Data
  /* Full result of generation */
  fullResult?: () => Data
+ /* Last function call */
+ lastFunctionCall?: () => Data
  }
  }

@@ -5187,7 +5213,7 @@ export type GeneratorOnnxLLM = Generator &
  | SwitchCondData
  | {
  __typename: 'SwitchCondInnerStateOutlet'
- outlet: 'contextState' | 'generated' | 'fullResult'
+ outlet: 'contextState' | 'generated' | 'fullResult' | 'lastFunctionCall'
  value: any
  }
  }>
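A sketch of how the new hooks fit together on a `GeneratorOnnxLLM` value: `onFunctionCall` / `onCompletionFinished` take the usual `EventAction` arrays (their shape is outside this diff, so placeholders are used), and the most recent function call is also exposed through the new `lastFunctionCall` outlet, e.g. as a `SwitchCondInnerStateOutlet` target:

```ts
// Sketch only: events wiring for the new ONNX LLM hooks.
const onnxLlmEvents = {
  events: {
    // Fired when the model requests a function call
    // (name / arguments / details arrive as event properties).
    onFunctionCall: [/* EventAction entries */],
    // Fired when a completion finishes (result, tool calls, full context).
    onCompletionFinished: [/* EventAction entries */],
  },
}
```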
@@ -5878,6 +5904,48 @@ export type GeneratorLLMActionLoadModel = Action & {
  __actionName: 'GENERATOR_LLM_LOAD_MODEL'
  }

+ /* Load multimodal (vision) model (PREVIEW FEATURE) */
+ export type GeneratorLLMActionLoadMultimodalModel = Action & {
+ __actionName: 'GENERATOR_LLM_LOAD_MULTIMODAL_MODEL'
+ }
+
+ /* Tokenize the prompt */
+ export type GeneratorLLMActionTokenize = ActionWithParams & {
+ __actionName: 'GENERATOR_LLM_TOKENIZE'
+ params?: Array<
+ | {
+ input: 'mode'
+ value?: string | DataLink | EventProperty
+ mapping?: string
+ }
+ | {
+ input: 'prompt'
+ value?: string | DataLink | EventProperty
+ mapping?: string
+ }
+ | {
+ input: 'promptImagePaths'
+ value?: Array<any> | DataLink | EventProperty
+ mapping?: string
+ }
+ | {
+ input: 'messages'
+ value?: Array<any> | DataLink | EventProperty
+ mapping?: string
+ }
+ >
+ }
+
+ /* Detokenize the tokens to text */
+ export type GeneratorLLMActionDetokenize = ActionWithParams & {
+ __actionName: 'GENERATOR_LLM_DETOKENIZE'
+ params?: Array<{
+ input: 'tokens'
+ value?: Array<any> | DataLink | EventProperty
+ mapping?: string
+ }>
+ }
+
  /* Pre-process the prompt, this can speed up the completion action */
  export type GeneratorLLMActionProcessPrompt = ActionWithParams & {
  __actionName: 'GENERATOR_LLM_PROCESS_PROMPT'
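A minimal sketch of the two new actions (base action fields omitted; the `mode` value and the token ids are illustrative). The results surface through the `tokenizeResult` / `detokenizeResult` outlets added further down in this diff:

```ts
// Sketch only: tokenize a plain prompt (chat mode would pass `messages` instead).
const tokenizePrompt = {
  __actionName: 'GENERATOR_LLM_TOKENIZE',
  params: [
    { input: 'mode', value: 'text' },          // assumed mode value
    { input: 'prompt', value: 'Hello world' },
  ],
} as const

// Sketch only: turn a token array back into text.
const detokenize = {
  __actionName: 'GENERATOR_LLM_DETOKENIZE',
  params: [{ input: 'tokens', value: [9906, 1917] }], // illustrative token ids
} as const
```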
@@ -5917,6 +5985,11 @@ export type GeneratorLLMActionProcessPrompt = ActionWithParams & {
  value?: string | DataLink | EventProperty
  mapping?: string
  }
+ | {
+ input: 'promptImagePaths'
+ value?: Array<any> | DataLink | EventProperty
+ mapping?: string
+ }
  | {
  input: 'promptTemplateData'
  value?: {} | DataLink | EventProperty
@@ -5974,6 +6047,11 @@ export type GeneratorLLMActionCompletion = ActionWithParams & {
  value?: string | DataLink | EventProperty
  mapping?: string
  }
+ | {
+ input: 'promptImagePaths'
+ value?: Array<any> | DataLink | EventProperty
+ mapping?: string
+ }
  | {
  input: 'promptTemplateData'
  value?: {} | DataLink | EventProperty
@@ -6154,6 +6232,11 @@ export type GeneratorLLMActionClearDownload = Action & {
  __actionName: 'GENERATOR_LLM_CLEAR_DOWNLOAD'
  }

+ /* Release multimodal (vision) context (PREVIEW FEATURE) */
+ export type GeneratorLLMActionReleaseMultimodalContext = Action & {
+ __actionName: 'GENERATOR_LLM_RELEASE_MULTIMODAL_CONTEXT'
+ }
+
  /* Release context */
  export type GeneratorLLMActionReleaseContext = Action & {
  __actionName: 'GENERATOR_LLM_RELEASE_CONTEXT'
@@ -6231,6 +6314,14 @@ Default property:
  modelHashType?: 'md5' | 'sha256' | 'sha1' | DataLink
  /* Hash of model */
  modelHash?: string | DataLink
+ /* Load multimodal (vision) context after model loaded (PREVIEW FEATURE) */
+ initMultimodal?: boolean | DataLink
+ /* The URL or path of mmproj file for multimodal vision support (PREVIEW FEATURE) */
+ mmprojUrl?: string | DataLink
+ /* Hash type of mmproj file (PREVIEW FEATURE) */
+ mmprojHashType?: 'md5' | 'sha256' | 'sha1' | DataLink
+ /* Hash of mmproj file (PREVIEW FEATURE) */
+ mmprojHash?: string | DataLink
  /* Chat Template (Jinja format) to override the default template from model */
  chatTemplate?: string | DataLink
  /* Context size (0 ~ 4096) (Default to 512) */
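A hedged sketch of the new multimodal (vision, preview) properties on the LLM generator; the mmproj URL and hash values are placeholders:

```ts
// Sketch only: enabling the preview multimodal support.
const multimodalProps = {
  initMultimodal: true, // load the vision context after the model is loaded
  mmprojUrl: 'https://example.com/mmproj-model-f16.gguf', // placeholder URL
  mmprojHashType: 'sha256',
  mmprojHash: '<expected-sha256-of-mmproj>',              // placeholder hash
} as const
```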
@@ -6300,6 +6391,9 @@ Default property:
  | DataLink
  /* Prompt (text mode) */
  completionPrompt?: string | DataLink
+ /* Image paths to be used in the prompt template (PREVIEW FEATURE)
+ In prompt, use `<__image__>` for position of image content */
+ completionPromptImagePaths?: Array<string | DataLink> | DataLink
  /* Data to be used in the prompt template (e.g. `Hello ${name}`). Supports nested data, such as `Hello ${user.name}`. */
  completionPromptTemplateData?: {} | DataLink
  /* The prompt template type */
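A sketch of how `completionPrompt` and the new `completionPromptImagePaths` pair up, with `<__image__>` marking where the image content is substituted (the file path is a placeholder):

```ts
// Sketch only: text prompt referencing one attached image.
const visionCompletionProps = {
  completionPrompt: 'Describe this picture: <__image__>',
  completionPromptImagePaths: ['file:///tmp/photo-1.jpg'], // placeholder path
} as const
```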
@@ -6397,6 +6491,10 @@ Default property:
  sessions?: () => Data
  /* Is evaluating */
  isEvaluating?: () => Data
+ /* Tokenize result */
+ tokenizeResult?: () => Data
+ /* Detokenize result */
+ detokenizeResult?: () => Data
  /* Last formatted prompt (messages or prompt) */
  completionLastFormattedPrompt?: () => Data
  /* Last completion token */
@@ -6437,6 +6535,8 @@ export type GeneratorLLM = Generator &
  | 'contextDetails'
  | 'sessions'
  | 'isEvaluating'
+ | 'tokenizeResult'
+ | 'detokenizeResult'
  | 'completionLastFormattedPrompt'
  | 'completionLastToken'
  | 'completionResult'
@@ -6473,6 +6573,11 @@ export type GeneratorQnnLlmActionGenerate = ActionWithParams & {
  value?: Array<any> | DataLink | EventProperty
  mapping?: string
  }
+ | {
+ input: 'tools'
+ value?: Array<any> | DataLink | EventProperty
+ mapping?: string
+ }
  >
  }

@@ -6492,6 +6597,7 @@ Default property:
  {
  "modelType": "Llama 3.2 3B Chat",
  "chatFormat": "Llama 3.x",
+ "toolsInUserMessage": true,
  "toolCallParser": "llama3_json",
  "toolChoice": "auto",
  "parallelToolCalls": false,
@@ -6520,9 +6626,18 @@ Default property:
  /* Custom model split parts */
  customModelSplitParts?: number | DataLink
  /* Chat format */
- chatFormat?: 'Llama 2' | 'Llama 3' | 'Llama 3.x' | 'Mistral v0.3' | 'Qwen 2' | DataLink
+ chatFormat?:
+ | 'Llama 2'
+ | 'Llama 3'
+ | 'Llama 3.x'
+ | 'Mistral v0.3'
+ | 'Qwen 2'
+ | 'Custom'
+ | DataLink
  /* Custom chat format template */
  customChatFormat?: string | DataLink
+ /* Put tools in user message */
+ toolsInUserMessage?: boolean | DataLink
  /* Prompt to generate */
  prompt?: string | DataLink
  /* Chat messages */
@@ -6538,7 +6653,7 @@ Default property:
  /* Stop words */
  stopWords?: Array<string | DataLink> | DataLink
  /* Tool call parser */
- toolCallParser?: 'llama3_json' | 'mistral' | DataLink
+ toolCallParser?: 'llama3_json' | 'mistral' | 'hermes' | 'internlm' | 'phi4' | DataLink
  /* Tools for chat mode using OpenAI-compatible function calling format
  Format: Array of objects with {type, function: {name, description, parameters}} structure
  See: https://platform.openai.com/docs/guides/function-calling */
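A hedged sketch tying the QNN LLM additions together: the widened `chatFormat` and `toolCallParser` unions, the new `toolsInUserMessage` flag, and the `tools` param on the generate action. Message and tool contents are illustrative, and base action fields are omitted:

```ts
// Sketch only: QNN LLM generator properties for tool calling.
const qnnLlmToolProps = {
  chatFormat: 'Llama 3.x',   // 'Custom' is now also accepted
  toolsInUserMessage: true,  // embed tool definitions in the user message
  toolCallParser: 'hermes',  // new options: 'hermes' | 'internlm' | 'phi4'
  toolChoice: 'auto',
} as const

// Sketch only: a generate action that passes tools alongside messages.
const qnnGenerateWithTools = {
  __actionName: 'GENERATOR_QNN_LLM_GENERATE',
  params: [
    { input: 'messages', value: [{ role: 'user', content: 'Weather in Taipei?' }] },
    { input: 'tools', value: [/* OpenAI-style tool objects, as sketched above */] },
  ],
} as const
```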
@@ -6917,6 +7032,11 @@ export type GeneratorAssistantActionAddMessage = ActionWithParams & {
  value?: string | DataLink | EventProperty
  mapping?: string
  }
+ | {
+ input: 'image'
+ value?: string | DataLink | EventProperty
+ mapping?: string
+ }
  | {
  input: 'payload'
  value?: {} | DataLink | EventProperty
@@ -7023,6 +7143,11 @@ export type GeneratorAssistantActionUpdateMessageAtIndex = ActionWithParams & {
  value?: string | DataLink | EventProperty
  mapping?: string
  }
+ | {
+ input: 'image'
+ value?: string | DataLink | EventProperty
+ mapping?: string
+ }
  | {
  input: 'payload'
  value?: {} | DataLink | EventProperty
@@ -7050,6 +7175,11 @@ export type GeneratorAssistantActionAddAudioMessage = ActionWithParams & {
  value?: string | DataLink | EventProperty
  mapping?: string
  }
+ | {
+ input: 'image'
+ value?: string | DataLink | EventProperty
+ mapping?: string
+ }
  | {
  input: 'useFileSearch'
  value?: boolean | DataLink | EventProperty
@@ -7149,6 +7279,11 @@ export type GeneratorAssistantActionUpdateAudioMessageAtIndex = ActionWithParams
  value?: string | DataLink | EventProperty
  mapping?: string
  }
+ | {
+ input: 'image'
+ value?: string | DataLink | EventProperty
+ mapping?: string
+ }
  | {
  input: 'payload'
  value?: {} | DataLink | EventProperty
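All four assistant message actions gain an `image` input. A minimal sketch of an add-message action using it; the action name comes from the `templateActionNameMap` hunk above, the path is a placeholder, and base action fields are omitted:

```ts
// Sketch only: add a user message with an attached image.
const addImageMessage = {
  __actionName: 'GENERATOR_ASSISTANT_ADD_MESSAGE',
  params: [
    { input: 'role', value: 'user' },
    { input: 'content', value: 'What is in this photo?' },
    { input: 'image', value: 'file:///tmp/photo-1.jpg' }, // placeholder path
  ],
} as const
```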
@@ -7305,7 +7440,9 @@ Default property:
  cacheMessages?: boolean | DataLink
  /* LLM Generator (Supports `LLM (GGML)` and `OpenAI LLM` generators) */
  llmGeneratorId?: string | DataLink
- /* LLM Live Policy. If the policy is `only-in-use`, the LLM context will be released when the assistant is not in use. */
+ /* LLM Live Policy. If the policy is `only-in-use`, the LLM context will be released when the assistant is not in use.
+
+ Note: LLM (Qualcomm AI Engine) recommend use `manual` and loaded constantly. */
  llmLivePolicy?: 'only-in-use' | 'manual' | DataLink
  /* LLM main session key */
  llmSessionKey?: string | DataLink
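Per the updated note, an assistant backed by the Qualcomm AI Engine (QNN) LLM would typically keep its context loaded rather than releasing it per use; a small sketch, with a placeholder generator id:

```ts
// Sketch only: assistant LLM settings following the QNN recommendation.
const assistantLlmProps = {
  llmGeneratorId: 'qnn-llm-generator', // placeholder id
  llmLivePolicy: 'manual',             // keep the context loaded constantly
} as const
```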
@@ -724,6 +724,16 @@ export const templateEventPropsMap = {
  onContextStateChange: [
  'GENERATOR_ONNX_LLM_CONTEXT_STATE', // type: string
  ],
+ onFunctionCall: [
+ 'GENERATOR_ONNX_LLM_FUNCTION_CALL_NAME', // type: string
+ 'GENERATOR_ONNX_LLM_FUNCTION_CALL_ARGUMENTS', // type: object
+ 'GENERATOR_ONNX_LLM_FUNCTION_CALL_DETAILS', // type: object
+ ],
+ onCompletionFinished: [
+ 'GENERATOR_ONNX_LLM_COMPLETION_RESULT', // type: string
+ 'GENERATOR_ONNX_LLM_COMPLETION_TOOL_CALLS', // type: array
+ 'GENERATOR_ONNX_LLM_COMPLETION_FULL_CONTEXT', // type: string
+ ],
  onError: [
  'GENERATOR_ONNX_LLM_ERROR', // type: string
  ],
@@ -788,6 +798,7 @@ export const templateEventPropsMap = {
  onGenerate: [
  'GENERATOR_QNN_LLM_RESULT', // type: string
  'GENERATOR_QNN_LLM_FULL_CONTEXT', // type: string
+ 'GENERATOR_QNN_LLM_TOOL_CALLS', // type: array
  ],
  onFunctionCall: [
  'GENERATOR_QNN_LLM_FUNCTION_CALL_NAME', // type: string