@fugood/bricks-project 2.21.7 → 2.21.8

This diff shows the changes between publicly available package versions as published to their respective registries; it is provided for informational purposes only.
@@ -257,6 +257,10 @@ export const templateActionNameMap = {
     BRICK_WEBVIEW_INJECT_JAVASCRIPT: {
       javascriptCode: 'BRICK_WEBVIEW_JAVASCRIPT_CODE',
     },
+    BRICK_WEBVIEW_QUERY_SELECTOR: {
+      querySelector: 'BRICK_WEBVIEW_QUERY_SELECTOR',
+      expression: 'BRICK_WEBVIEW_EXPRESSION',
+    },
   },
   BRICK_CAMERA: {
     BRICK_CAMERA_TAKE_PICTURE: {
@@ -592,6 +596,15 @@ export const templateActionNameMap = {
     },
   },
   GENERATOR_LLM: {
+    GENERATOR_LLM_TOKENIZE: {
+      mode: 'GENERATOR_LLM_MODE',
+      prompt: 'GENERATOR_LLM_PROMPT',
+      promptMediaPaths: 'GENERATOR_LLM_PROMPT_MEDIA_PATHS',
+      messages: 'GENERATOR_LLM_MESSAGES',
+    },
+    GENERATOR_LLM_DETOKENIZE: {
+      tokens: 'GENERATOR_LLM_TOKENS',
+    },
     GENERATOR_LLM_PROCESS_PROMPT: {
       sessionKey: 'GENERATOR_LLM_SESSION_KEY',
       mode: 'GENERATOR_LLM_MODE',
@@ -600,6 +613,7 @@ export const templateActionNameMap = {
       parallelToolCalls: 'GENERATOR_LLM_PARALLEL_TOOL_CALLS',
       toolChoice: 'GENERATOR_LLM_TOOL_CHOICE',
       prompt: 'GENERATOR_LLM_PROMPT',
+      promptMediaPaths: 'GENERATOR_LLM_PROMPT_MEDIA_PATHS',
       promptTemplateData: 'GENERATOR_LLM_PROMPT_TEMPLATE_DATA',
       promptTemplateType: 'GENERATOR_LLM_PROMPT_TEMPLATE_TYPE',
       responseFormat: 'GENERATOR_LLM_RESPONSE_FORMAT',
@@ -612,6 +626,7 @@ export const templateActionNameMap = {
       parallelToolCalls: 'GENERATOR_LLM_PARALLEL_TOOL_CALLS',
       toolChoice: 'GENERATOR_LLM_TOOL_CHOICE',
       prompt: 'GENERATOR_LLM_PROMPT',
+      promptMediaPaths: 'GENERATOR_LLM_PROMPT_MEDIA_PATHS',
       promptTemplateData: 'GENERATOR_LLM_PROMPT_TEMPLATE_DATA',
       promptTemplateType: 'GENERATOR_LLM_PROMPT_TEMPLATE_TYPE',
       responseFormat: 'GENERATOR_LLM_RESPONSE_FORMAT',
@@ -679,6 +694,7 @@ export const templateActionNameMap = {
     GENERATOR_ASSISTANT_ADD_MESSAGE: {
       role: 'GENERATOR_ASSISTANT_ROLE',
       content: 'GENERATOR_ASSISTANT_CONTENT',
+      image: 'GENERATOR_ASSISTANT_IMAGE',
       payload: 'GENERATOR_ASSISTANT_PAYLOAD',
       useFileSearch: 'GENERATOR_ASSISTANT_USE_FILE_SEARCH',
       filePath: 'GENERATOR_ASSISTANT_FILE_PATH',
@@ -702,12 +718,14 @@ export const templateActionNameMap = {
     GENERATOR_ASSISTANT_UPDATE_MESSAGE_AT_INDEX: {
       index: 'GENERATOR_ASSISTANT_INDEX',
       content: 'GENERATOR_ASSISTANT_CONTENT',
+      image: 'GENERATOR_ASSISTANT_IMAGE',
       payload: 'GENERATOR_ASSISTANT_PAYLOAD',
     },
     GENERATOR_ASSISTANT_ADD_AUDIO_MESSAGE: {
       role: 'GENERATOR_ASSISTANT_ROLE',
       contentFile: 'GENERATOR_ASSISTANT_CONTENT_FILE',
       contentBase64: 'GENERATOR_ASSISTANT_CONTENT_BASE64',
+      image: 'GENERATOR_ASSISTANT_IMAGE',
       useFileSearch: 'GENERATOR_ASSISTANT_USE_FILE_SEARCH',
       payload: 'GENERATOR_ASSISTANT_PAYLOAD',
       filePath: 'GENERATOR_ASSISTANT_FILE_PATH',
@@ -730,6 +748,7 @@ export const templateActionNameMap = {
       index: 'GENERATOR_ASSISTANT_INDEX',
       contentFile: 'GENERATOR_ASSISTANT_CONTENT_FILE',
       contentBase64: 'GENERATOR_ASSISTANT_CONTENT_BASE64',
+      image: 'GENERATOR_ASSISTANT_IMAGE',
       payload: 'GENERATOR_ASSISTANT_PAYLOAD',
     },
     GENERATOR_ASSISTANT_REMOVE_MESSAGE_AT_INDEX: {
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@fugood/bricks-project",
-  "version": "2.21.7",
+  "version": "2.21.8",
   "main": "index.ts",
   "scripts": {
     "build": "node scripts/build.js"
package/types/bricks.ts CHANGED
@@ -2420,6 +2420,23 @@ export type BrickWebViewActionInjectJavascript = ActionWithParams & {
   }>
 }
 
+/* Query selector on the WebView */
+export type BrickWebViewActionQuerySelector = ActionWithParams & {
+  __actionName: 'BRICK_WEBVIEW_QUERY_SELECTOR'
+  params?: Array<
+    | {
+        input: 'querySelector'
+        value?: string | DataLink | EventProperty
+        mapping?: string
+      }
+    | {
+        input: 'expression'
+        value?: string | DataLink | EventProperty
+        mapping?: string
+      }
+  >
+}
+
 /* Do go forward on the WebView */
 export type BrickWebViewActionGoForward = Action & {
   __actionName: 'BRICK_WEBVIEW_GO_FORWARD'
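As a minimal sketch of what an action conforming to the new `BrickWebViewActionQuerySelector` type could look like: only fields visible in this hunk are used, the selector and expression values are illustrative, and the exact evaluation semantics of `expression` are an assumption rather than something this diff states.

```ts
// Sketch only: shape follows the type added above, values are examples.
const queryTitleAction = {
  __actionName: 'BRICK_WEBVIEW_QUERY_SELECTOR',
  params: [
    // CSS selector to match inside the page loaded by the WebView
    { input: 'querySelector', value: 'h1.article-title' },
    // Expression evaluated against the matched element (assumed semantics)
    { input: 'expression', value: 'textContent' },
  ],
}
```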
@@ -2502,6 +2519,12 @@ Default property:
     /* Event of the webview on message by `window.ReactNativeWebView.postMessage` on you're injected javascript code */
     onMessage?: Array<EventAction>
   }
+  outlets?: {
+    /* The result of the query selector action */
+    queryResult?: () => Data
+    /* The error of the query selector action */
+    queryError?: () => Data
+  }
   animation?: AnimationBasicEvents & {
     onLoad?: Animation
     onError?: Animation
@@ -2523,7 +2546,7 @@ export type BrickWebView = Brick &
       | SwitchCondData
       | {
           __typename: 'SwitchCondInnerStateOutlet'
-          outlet: ''
+          outlet: 'queryResult' | 'queryError'
           value: any
         }
     }>
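Assuming the new outlets follow the same inner-state-outlet pattern as other bricks, a switch condition bound to them might look roughly like the following; the comparison value is hypothetical and not taken from the package.

```ts
// Sketch only: binds a switch condition to one of the new WebView outlet names.
const queryErrorCond = {
  __typename: 'SwitchCondInnerStateOutlet',
  outlet: 'queryError', // 'queryResult' is the other newly allowed value
  value: null,          // hypothetical value to compare the outlet against
}
```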
package/types/data.ts CHANGED
@@ -56,6 +56,11 @@ export type Data<T = any> = DataDef & {
     | 'rich-text-content'
     | 'sandbox-script'
     | 'llm-prompt'
+    | 'llm-messages'
+    | 'llm-tools'
+    | 'mcp-server-resources'
+    | 'mcp-server-tools'
+    | 'mcp-server-prompts'
   }
   value: T
 }
@@ -67,6 +72,7 @@ export type DataAssetKind = {
     | 'media-resource-audio'
     | 'media-resource-file'
     | 'lottie-file-uri'
+    | 'rive-file-uri'
     | 'ggml-model-asset'
     | 'gguf-model-asset'
     | 'binary-asset'
@@ -5092,24 +5092,19 @@ Default property:
   /* Model type */
   modelType?:
     | 'auto'
-    | 'gpt2'
-    | 'gptj'
-    | 'gpt_bigcode'
-    | 'gpt_neo'
-    | 'gpt_neox'
-    | 'bloom'
-    | 'mpt'
-    | 'opt'
-    | 'llama'
-    | 'falcon'
-    | 'mistral'
+    | 'text-generation'
+    | 'qwen2-vl'
+    | 'paligemma'
+    | 'llava'
+    | 'llava_onevision'
+    | 'moondream1'
+    | 'florence2'
+    | 'idefics3'
+    | 'smolvlm'
+    | 'phi3_v'
     | 't5'
     | 'mt5'
     | 'longt5'
-    | 'phi'
-    | 'qwen2'
-    | 'stablelm'
-    | 'gemma'
     | DataLink
   /* Load quantized model (deprecated, use `quantizeType` instead) */
   quantized?: boolean | DataLink
@@ -5902,6 +5897,48 @@ export type GeneratorLLMActionLoadModel = Action & {
   __actionName: 'GENERATOR_LLM_LOAD_MODEL'
 }
 
+/* Load multimodal (vision) model (PREVIEW FEATURE) */
+export type GeneratorLLMActionLoadMultimodalModel = Action & {
+  __actionName: 'GENERATOR_LLM_LOAD_MULTIMODAL_MODEL'
+}
+
+/* Tokenize the prompt */
+export type GeneratorLLMActionTokenize = ActionWithParams & {
+  __actionName: 'GENERATOR_LLM_TOKENIZE'
+  params?: Array<
+    | {
+        input: 'mode'
+        value?: string | DataLink | EventProperty
+        mapping?: string
+      }
+    | {
+        input: 'prompt'
+        value?: string | DataLink | EventProperty
+        mapping?: string
+      }
+    | {
+        input: 'promptMediaPaths'
+        value?: Array<any> | DataLink | EventProperty
+        mapping?: string
+      }
+    | {
+        input: 'messages'
+        value?: Array<any> | DataLink | EventProperty
+        mapping?: string
+      }
+  >
+}
+
+/* Detokenize the tokens to text */
+export type GeneratorLLMActionDetokenize = ActionWithParams & {
+  __actionName: 'GENERATOR_LLM_DETOKENIZE'
+  params?: Array<{
+    input: 'tokens'
+    value?: Array<any> | DataLink | EventProperty
+    mapping?: string
+  }>
+}
+
 /* Pre-process the prompt, this can speed up the completion action */
 export type GeneratorLLMActionProcessPrompt = ActionWithParams & {
   __actionName: 'GENERATOR_LLM_PROCESS_PROMPT'
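A minimal sketch of the two new actions, using only the params declared above. The `mode` value and the token ids are illustrative, and the results would presumably surface through the `tokenizeResult` and `detokenizeResult` outlets added later in this diff.

```ts
// Tokenize a plain-text prompt (param names come from the diff; values are examples).
const tokenizeAction = {
  __actionName: 'GENERATOR_LLM_TOKENIZE',
  params: [
    { input: 'mode', value: 'prompt' },       // assumed mode value, not confirmed by the diff
    { input: 'prompt', value: 'Hello world' },
  ],
}

// Turn token ids back into text.
const detokenizeAction = {
  __actionName: 'GENERATOR_LLM_DETOKENIZE',
  params: [
    { input: 'tokens', value: [9906, 1917] }, // illustrative token ids
  ],
}
```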
@@ -5941,6 +5978,11 @@ export type GeneratorLLMActionProcessPrompt = ActionWithParams & {
         value?: string | DataLink | EventProperty
         mapping?: string
       }
+    | {
+        input: 'promptMediaPaths'
+        value?: Array<any> | DataLink | EventProperty
+        mapping?: string
+      }
     | {
         input: 'promptTemplateData'
         value?: {} | DataLink | EventProperty
@@ -5998,6 +6040,11 @@ export type GeneratorLLMActionCompletion = ActionWithParams & {
         value?: string | DataLink | EventProperty
         mapping?: string
       }
+    | {
+        input: 'promptMediaPaths'
+        value?: Array<any> | DataLink | EventProperty
+        mapping?: string
+      }
     | {
         input: 'promptTemplateData'
         value?: {} | DataLink | EventProperty
@@ -6178,6 +6225,11 @@ export type GeneratorLLMActionClearDownload = Action & {
   __actionName: 'GENERATOR_LLM_CLEAR_DOWNLOAD'
 }
 
+/* Release multimodal (vision) context (PREVIEW FEATURE) */
+export type GeneratorLLMActionReleaseMultimodalContext = Action & {
+  __actionName: 'GENERATOR_LLM_RELEASE_MULTIMODAL_CONTEXT'
+}
+
 /* Release context */
 export type GeneratorLLMActionReleaseContext = Action & {
   __actionName: 'GENERATOR_LLM_RELEASE_CONTEXT'
@@ -6255,6 +6307,14 @@ Default property:
   modelHashType?: 'md5' | 'sha256' | 'sha1' | DataLink
   /* Hash of model */
   modelHash?: string | DataLink
+  /* Load multimodal (vision) context after model loaded (PREVIEW FEATURE) */
+  initMultimodal?: boolean | DataLink
+  /* The URL or path of mmproj file for multimodal vision support (PREVIEW FEATURE) */
+  mmprojUrl?: string | DataLink
+  /* Hash type of mmproj file (PREVIEW FEATURE) */
+  mmprojHashType?: 'md5' | 'sha256' | 'sha1' | DataLink
+  /* Hash of mmproj file (PREVIEW FEATURE) */
+  mmprojHash?: string | DataLink
   /* Chat Template (Jinja format) to override the default template from model */
   chatTemplate?: string | DataLink
   /* Context size (0 ~ 4096) (Default to 512) */
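A sketch of the new preview multimodal properties on the generator definition: only the fields added in this hunk are shown, and the URL and hash values are placeholders rather than real assets.

```ts
// Placeholder values only; the URL and hash must match the actual mmproj file you use.
const multimodalConfig = {
  initMultimodal: true,                                    // load the vision context after the model
  mmprojUrl: 'https://example.com/models/mmproj-f16.gguf', // placeholder URL or local path
  mmprojHashType: 'sha256',
  mmprojHash: '<sha256-of-mmproj-file>',                   // placeholder hash
}
```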
@@ -6324,6 +6384,9 @@ Default property:
     | DataLink
   /* Prompt (text mode) */
   completionPrompt?: string | DataLink
+  /* Media paths to be used in the prompt template (PREVIEW FEATURE)
+     In prompt, use `<__image__>` for position of media content */
+  completionPromptMediaPaths?: Array<string | DataLink> | DataLink
   /* Data to be used in the prompt template (e.g. `Hello ${name}`). Supports nested data, such as `Hello ${user.name}`. */
   completionPromptTemplateData?: {} | DataLink
   /* The prompt template type */
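A sketch of how the new `completionPromptMediaPaths` property pairs with the `<__image__>` placeholder described in the comment above; the path is illustrative and the one-path-per-placeholder pairing is an assumption.

```ts
// Assumed pairing: one media path per `<__image__>` occurrence in the prompt.
const completionConfig = {
  completionPrompt: 'Describe this picture: <__image__>',
  completionPromptMediaPaths: ['/path/to/photo.jpg'], // illustrative local path
}
```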
@@ -6421,6 +6484,10 @@ Default property:
   sessions?: () => Data
   /* Is evaluating */
   isEvaluating?: () => Data
+  /* Tokenize result */
+  tokenizeResult?: () => Data
+  /* Detokenize result */
+  detokenizeResult?: () => Data
   /* Last formatted prompt (messages or prompt) */
   completionLastFormattedPrompt?: () => Data
   /* Last completion token */
@@ -6461,6 +6528,8 @@ export type GeneratorLLM = Generator &
     | 'contextDetails'
     | 'sessions'
     | 'isEvaluating'
+    | 'tokenizeResult'
+    | 'detokenizeResult'
     | 'completionLastFormattedPrompt'
     | 'completionLastToken'
     | 'completionResult'
@@ -6521,6 +6590,7 @@ Default property:
 {
   "modelType": "Llama 3.2 3B Chat",
   "chatFormat": "Llama 3.x",
+  "toolsInUserMessage": true,
   "toolCallParser": "llama3_json",
   "toolChoice": "auto",
   "parallelToolCalls": false,
@@ -6549,9 +6619,18 @@ Default property:
   /* Custom model split parts */
   customModelSplitParts?: number | DataLink
   /* Chat format */
-  chatFormat?: 'Llama 2' | 'Llama 3' | 'Llama 3.x' | 'Mistral v0.3' | 'Qwen 2' | DataLink
+  chatFormat?:
+    | 'Llama 2'
+    | 'Llama 3'
+    | 'Llama 3.x'
+    | 'Mistral v0.3'
+    | 'Qwen 2'
+    | 'Custom'
+    | DataLink
   /* Custom chat format template */
   customChatFormat?: string | DataLink
+  /* Put tools in user message */
+  toolsInUserMessage?: boolean | DataLink
   /* Prompt to generate */
   prompt?: string | DataLink
   /* Chat messages */
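A sketch combining the new `'Custom'` chat format with `customChatFormat` and `toolsInUserMessage`; the Jinja template is a minimal illustration, not a template shipped with the package.

```ts
const chatConfig = {
  chatFormat: 'Custom',
  // Minimal illustrative Jinja template; real templates are model specific.
  customChatFormat:
    '{% for message in messages %}{{ message.role }}: {{ message.content }}\n{% endfor %}',
  toolsInUserMessage: true, // place tool definitions inside the user message
}
```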
@@ -6712,7 +6791,7 @@ interface GeneratorOpenAILLMDef {
 Default property:
 {
   "apiEndpoint": "https://api.openai.com/v1",
-  "model": "gpt-4o-mini",
+  "model": "gpt-4o",
   "completionMessages": [
     {
       "role": "system",
@@ -6722,8 +6801,6 @@ Default property:
   "completionMaxTokens": 1024,
   "completionTemperature": 1,
   "completionTopP": 1,
-  "completionFrequencyPenalty": 0,
-  "completionPresencePenalty": 0,
   "completionStop": []
 }
 */
@@ -6812,7 +6889,11 @@ Default property:
 - Compatible with OpenAI API format
 - Supports function calling
 - Streaming responses
-- Custom API endpoints */
+- Custom API endpoints, like
+  - OpenAI API: https://platform.openai.com/docs/guides/text?api-mode=chat
+  - Anthropic API: https://docs.anthropic.com/en/api/openai-sdk
+  - Gemini API: https://ai.google.dev/gemini-api/docs/openai
+  - llama.cpp server: https://github.com/ggml-org/llama.cpp/tree/master/tools/server */
 export type GeneratorOpenAILLM = Generator &
   GeneratorOpenAILLMDef & {
     templateKey: 'GENERATOR_OPENAI_LLM'
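Given the expanded note about custom endpoints, here is a sketch of pointing the OpenAI-compatible generator at a local llama.cpp server. The host and port reflect a typical local setup and are assumptions, and the model name simply follows the new default from this diff (many self-hosted backends ignore or remap it).

```ts
const localServerConfig = {
  apiEndpoint: 'http://localhost:8080/v1', // assumed llama.cpp server address
  model: 'gpt-4o',                         // may be ignored or mapped server-side, depending on the backend
}
```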
@@ -6946,6 +7027,11 @@ export type GeneratorAssistantActionAddMessage = ActionWithParams & {
         value?: string | DataLink | EventProperty
         mapping?: string
       }
+    | {
+        input: 'image'
+        value?: string | DataLink | EventProperty
+        mapping?: string
+      }
     | {
         input: 'payload'
         value?: {} | DataLink | EventProperty
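A sketch of `GENERATOR_ASSISTANT_ADD_MESSAGE` with the new `image` param; only params visible in this diff are included, the values are illustrative, and whether `image` takes a path, URI, or base64 string is not stated here.

```ts
const addImageMessage = {
  __actionName: 'GENERATOR_ASSISTANT_ADD_MESSAGE',
  params: [
    { input: 'role', value: 'user' },
    { input: 'content', value: 'What is in this image?' },
    { input: 'image', value: '/path/to/photo.jpg' }, // assumed to be a file path or URI
  ],
}
```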
@@ -7052,6 +7138,11 @@ export type GeneratorAssistantActionUpdateMessageAtIndex = ActionWithParams & {
         value?: string | DataLink | EventProperty
         mapping?: string
       }
+    | {
+        input: 'image'
+        value?: string | DataLink | EventProperty
+        mapping?: string
+      }
     | {
         input: 'payload'
         value?: {} | DataLink | EventProperty
@@ -7079,6 +7170,11 @@ export type GeneratorAssistantActionAddAudioMessage = ActionWithParams & {
         value?: string | DataLink | EventProperty
         mapping?: string
       }
+    | {
+        input: 'image'
+        value?: string | DataLink | EventProperty
+        mapping?: string
+      }
     | {
         input: 'useFileSearch'
         value?: boolean | DataLink | EventProperty
@@ -7178,6 +7274,11 @@ export type GeneratorAssistantActionUpdateAudioMessageAtIndex = ActionWithParams
         value?: string | DataLink | EventProperty
         mapping?: string
       }
+    | {
+        input: 'image'
+        value?: string | DataLink | EventProperty
+        mapping?: string
+      }
     | {
         input: 'payload'
         value?: {} | DataLink | EventProperty