@fugood/bricks-project 2.22.0-beta.22 → 2.22.0-beta.23
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/compile/action-name-map.ts +18 -0
- package/package.json +2 -2
- package/types/generators.ts +215 -8
- package/utils/event-props.ts +17 -0
package/compile/action-name-map.ts
CHANGED

@@ -719,6 +719,11 @@ export const templateActionNameMap = {
     },
   },
   GENERATOR_QNN_LLM: {
+    GENERATOR_QNN_LLM_PROCESS: {
+      prompt: 'GENERATOR_QNN_LLM_PROMPT',
+      messages: 'GENERATOR_QNN_LLM_MESSAGES',
+      tools: 'GENERATOR_QNN_LLM_TOOLS',
+    },
     GENERATOR_QNN_LLM_GENERATE: {
       prompt: 'GENERATOR_QNN_LLM_PROMPT',
       messages: 'GENERATOR_QNN_LLM_MESSAGES',

@@ -745,6 +750,19 @@ export const templateActionNameMap = {
       text: 'GENERATOR_OPENAI_TTS_TEXT',
     },
   },
+  GENERATOR_ANTHROPIC_LLM: {
+    GENERATOR_ANTHROPIC_LLM_COMPLETION: {
+      systemMessage: 'GENERATOR_ANTHROPIC_LLM_SYSTEM_MESSAGE',
+      messages: 'GENERATOR_ANTHROPIC_LLM_MESSAGES',
+      maxTokens: 'GENERATOR_ANTHROPIC_LLM_MAX_TOKENS',
+      temperature: 'GENERATOR_ANTHROPIC_LLM_TEMPERATURE',
+      topP: 'GENERATOR_ANTHROPIC_LLM_TOP_P',
+      topK: 'GENERATOR_ANTHROPIC_LLM_TOP_K',
+      stopSequences: 'GENERATOR_ANTHROPIC_LLM_STOP_SEQUENCES',
+      tools: 'GENERATOR_ANTHROPIC_LLM_TOOLS',
+      toolChoice: 'GENERATOR_ANTHROPIC_LLM_TOOL_CHOICE',
+    },
+  },
   GENERATOR_ASSISTANT: {
     GENERATOR_ASSISTANT_ADD_MESSAGE: {
       role: 'GENERATOR_ASSISTANT_ROLE',
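For orientation, a hedged sketch of how a `templateActionNameMap` entry reads: template key → action name → param input → property-key constant, matching the hunks above. The `resolveParamKey` helper below is illustrative only and not part of the package.

```ts
// Illustrative only: the nested-record shape mirrors the diff above,
// but this helper is not part of @fugood/bricks-project.
type ActionNameMap = Record<string, Record<string, Record<string, string>>>

const exampleMap: ActionNameMap = {
  GENERATOR_ANTHROPIC_LLM: {
    GENERATOR_ANTHROPIC_LLM_COMPLETION: {
      messages: 'GENERATOR_ANTHROPIC_LLM_MESSAGES',
      tools: 'GENERATOR_ANTHROPIC_LLM_TOOLS',
    },
  },
}

// Look up which property constant an action param input maps to.
function resolveParamKey(
  map: ActionNameMap,
  templateKey: string,
  actionName: string,
  input: string,
): string | undefined {
  return map[templateKey]?.[actionName]?.[input]
}

resolveParamKey(exampleMap, 'GENERATOR_ANTHROPIC_LLM', 'GENERATOR_ANTHROPIC_LLM_COMPLETION', 'messages')
// -> 'GENERATOR_ANTHROPIC_LLM_MESSAGES'
```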
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@fugood/bricks-project",
-  "version": "2.22.0-beta.22",
+  "version": "2.22.0-beta.23",
   "main": "index.ts",
   "scripts": {
     "build": "node scripts/build.js"

@@ -14,5 +14,5 @@
     "lodash": "^4.17.4",
     "uuid": "^8.3.1"
   },
-  "gitHead": "
+  "gitHead": "e63aab5d9c9347e019ac3effa18ed641feea2982"
 }
package/types/generators.ts
CHANGED
@@ -1928,9 +1928,10 @@ interface GeneratorBleCentralDef {
   Default property:
   {
     "init": false,
-    "scanTime":
-    "updateInterval":
+    "scanTime": 10,
+    "updateInterval": 1000,
     "retainTime": 120,
+    "maximumDiscoveredDevice": 256,
     "allowDuplicates": false,
     "scanMode": "LowPower"
   }
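The hunk above only changes the documented defaults on `GeneratorBleCentralDef`. As a rough sketch, assuming the generator's property block accepts the same keys as those defaults (the field declarations themselves are outside this hunk), an override could look like:

```ts
// Sketch only: keys are assumed to mirror the documented defaults;
// the concrete GeneratorBleCentral property type is not shown in the diff.
const bleCentralProperty = {
  init: true,
  scanTime: 10,                 // documented default
  updateInterval: 1000,         // documented default
  maximumDiscoveredDevice: 256, // new cap documented in this version
  allowDuplicates: false,
  scanMode: 'LowPower',
}
```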
@@ -6637,12 +6638,14 @@ Default property:
   useMmap?: boolean | DataLink
   /* Use Flash Attention for inference (Recommended with GPU enabled) */
   useFlashAttn?: boolean | DataLink
-  /* Use full-size SWA cache. May improve performance for multiple sequences but uses more memory. */
-  useSwaFull?: boolean | DataLink
   /* KV cache data type for the K (Default: f16) */
   cacheKType?: 'f16' | 'f32' | 'q8_0' | 'q4_0' | 'q4_1' | 'iq4_nl' | 'q5_0' | 'q5_1' | DataLink
   /* KV cache data type for the V (Default: f16) */
   cacheVType?: 'f16' | 'f32' | 'q8_0' | 'q4_0' | 'q4_1' | 'iq4_nl' | 'q5_0' | 'q5_1' | DataLink
+  /* Use a unified buffer across the input sequences when computing the attention */
+  useKVUnified?: boolean | DataLink
+  /* Use full-size SWA cache. May improve performance for multiple sequences but uses more memory. */
+  useSwaFull?: boolean | DataLink
   /* Enable context shift */
   ctxShift?: boolean | DataLink
   /* Enable Transform Script for processing the prompt */
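These optional fields belong to the property block of the llama-based generator definition (the enclosing interface name is outside this hunk). A hedged sketch of a property object using the relocated and new KV-cache options, with plain values in place of `DataLink`:

```ts
// Sketch only: each of these fields also accepts a DataLink in the real types;
// plain literals are used here for illustration.
const llamaContextProperty = {
  useMmap: true,
  useFlashAttn: true,
  cacheKType: 'q8_0',  // KV cache data type for K (default f16)
  cacheVType: 'q8_0',  // KV cache data type for V (default f16)
  useKVUnified: true,  // new: unified buffer across input sequences
  useSwaFull: false,   // full-size SWA cache trades memory for multi-sequence performance
  ctxShift: true,
}
```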
@@ -6886,7 +6889,7 @@ Default property:
   "completionTopK": 40,
   "completionTopP": 0.9,
   "completionMinP": 0.05,
-  "useGuideToken":
+  "useGuideToken": true,
   "contextSize": 8192,
   "batchSize": 8192,
   "microBatchSize": 512,
@@ -7163,6 +7166,28 @@ export type GeneratorQnnLlmActionAbortModelDownload = Action & {
   __actionName: 'GENERATOR_QNN_LLM_ABORT_MODEL_DOWNLOAD'
 }
 
+/* Pre-process the prompt, to prepare KV cache */
+export type GeneratorQnnLlmActionProcess = ActionWithParams & {
+  __actionName: 'GENERATOR_QNN_LLM_PROCESS'
+  params?: Array<
+    | {
+        input: 'prompt'
+        value?: string | DataLink | EventProperty
+        mapping?: string
+      }
+    | {
+        input: 'messages'
+        value?: Array<any> | DataLink | EventProperty
+        mapping?: string
+      }
+    | {
+        input: 'tools'
+        value?: Array<any> | DataLink | EventProperty
+        mapping?: string
+      }
+  >
+}
+
 /* Generate text */
 export type GeneratorQnnLlmActionGenerate = ActionWithParams & {
   __actionName: 'GENERATOR_QNN_LLM_GENERATE'
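A minimal sketch of dispatching the new process action. The `ActionWithParams`, `DataLink`, and `EventProperty` base shapes are not part of this diff, so only the fields shown above are used, any required base fields are omitted, and the message shape is assumed.

```ts
// Sketch only: base-type fields from ActionWithParams are omitted,
// and the chat-message shape inside `value` is an assumption.
const warmUpKvCache = {
  __actionName: 'GENERATOR_QNN_LLM_PROCESS' as const,
  params: [
    { input: 'messages' as const, value: [{ role: 'user', content: 'Hello' }] },
    { input: 'tools' as const, value: [] },
  ],
}
```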
@@ -7205,6 +7230,10 @@ Default property:
   "toolCallParser": "llama3_json",
   "toolChoice": "auto",
   "parallelToolCalls": false,
+  "temperature": 0.8,
+  "seed": 42,
+  "topK": 40,
+  "topP": 0.95,
   "greedy": false
 }
 */
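Assuming the QNN LLM generator's property block mirrors these documented default keys (the field declarations are outside this hunk), overriding the newly documented sampling defaults might look like:

```ts
// Sketch only: property keys are assumed to mirror the documented defaults above.
const qnnLlmSampling = {
  temperature: 0.8,
  seed: 42,
  topK: 40,
  topP: 0.95,
  greedy: false,
}
```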
@@ -7225,10 +7254,10 @@ Default property:
   /* SOC model */
   socModel?: 'X Elite' | 'X Plus' | '8 Elite' | '8 Gen 3' | 'QCS8550' | DataLink
   /* Custom model base URL
-     The
+     The model should be bundled, for details see https://github.com/mybigday/node-qnn-llm?tab=readme-ov-file#bundled-file */
   customModelUrl?: string | DataLink
-  /* Custom model
-
+  /* Custom model MD5 */
+  customModelMd5?: string | DataLink
   /* Chat format */
   chatFormat?:
     | 'Llama 2'
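A hedged sketch of pointing the QNN LLM generator at a bundled custom model, using only the fields visible in this hunk; the URL and MD5 value below are placeholders:

```ts
// Sketch only: the URL and MD5 string are placeholders, not real artifacts.
const qnnLlmCustomModel = {
  socModel: '8 Elite' as const,
  customModelUrl: 'https://example.com/models/my-model.bundle', // hypothetical URL
  customModelMd5: '0123456789abcdef0123456789abcdef',           // placeholder checksum
}
```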
@@ -7624,6 +7653,184 @@ export type GeneratorOpenAiTTS = Generator &
   >
 }
 
+/* Run text completion */
+export type GeneratorAnthropicLLMActionCompletion = ActionWithParams & {
+  __actionName: 'GENERATOR_ANTHROPIC_LLM_COMPLETION'
+  params?: Array<
+    | {
+        input: 'systemMessage'
+        value?: string | DataLink | EventProperty
+        mapping?: string
+      }
+    | {
+        input: 'messages'
+        value?: Array<any> | DataLink | EventProperty
+        mapping?: string
+      }
+    | {
+        input: 'maxTokens'
+        value?: number | DataLink | EventProperty
+        mapping?: string
+      }
+    | {
+        input: 'temperature'
+        value?: number | DataLink | EventProperty
+        mapping?: string
+      }
+    | {
+        input: 'topP'
+        value?: number | DataLink | EventProperty
+        mapping?: string
+      }
+    | {
+        input: 'topK'
+        value?: number | DataLink | EventProperty
+        mapping?: string
+      }
+    | {
+        input: 'stopSequences'
+        value?: Array<any> | DataLink | EventProperty
+        mapping?: string
+      }
+    | {
+        input: 'tools'
+        value?: Array<any> | DataLink | EventProperty
+        mapping?: string
+      }
+    | {
+        input: 'toolChoice'
+        value?: {} | DataLink | EventProperty
+        mapping?: string
+      }
+  >
+}
+
+/* Stop text completion */
+export type GeneratorAnthropicLLMActionStopCompletion = Action & {
+  __actionName: 'GENERATOR_ANTHROPIC_LLM_STOP_COMPLETION'
+}
+
+interface GeneratorAnthropicLLMDef {
+  /*
+  Default property:
+  {
+    "apiEndpoint": "https://api.anthropic.com/v1/messages",
+    "model": "claude-3-5-sonnet-20241022",
+    "systemMessage": "You are a helpful assistant.",
+    "completionMessages": [],
+    "completionMaxTokens": 1024,
+    "completionTemperature": 1,
+    "completionTopP": 1,
+    "completionStopSequences": []
+  }
+  */
+  property?: {
+    /* API endpoint URL */
+    apiEndpoint?: string | DataLink
+    /* API key */
+    apiKey?: string | DataLink
+    /* Model name (Default: claude-3-5-sonnet-20241022) */
+    model?: string | DataLink
+    /* System message content */
+    systemMessage?: string | DataLink
+    /* Chat messages (user/assistant only) */
+    completionMessages?:
+      | Array<
+          | DataLink
+          | {
+              role?: 'user' | 'assistant' | DataLink
+              content?:
+                | string
+                | DataLink
+                | Array<
+                    | DataLink
+                    | {
+                        type?: 'text' | 'image' | DataLink
+                        text?: string | DataLink
+                        source?:
+                          | DataLink
+                          | {
+                              type?: string | DataLink
+                              media_type?: string | DataLink
+                              data?: string | DataLink
+                            }
+                      }
+                  >
+                | DataLink
+                | DataLink
+            }
+        >
+      | DataLink
+    /* Tools for function calling following Anthropic format
+       Format: Array of objects with {name, description, input_schema} structure
+       See: https://docs.anthropic.com/en/docs/tool-use */
+    completionTools?: Array<{} | DataLink> | DataLink
+    /* Tool choice for function calling */
+    completionToolChoice?:
+      | DataLink
+      | {
+          type?: 'auto' | 'any' | 'tool' | DataLink
+          name?: string | DataLink
+        }
+    /* Maximum tokens to generate */
+    completionMaxTokens?: number | DataLink
+    /* Temperature */
+    completionTemperature?: number | DataLink
+    /* Top P sampling */
+    completionTopP?: number | DataLink
+    /* Top K sampling */
+    completionTopK?: number | DataLink
+    /* Stop sequences */
+    completionStopSequences?: Array<string | DataLink> | DataLink
+  }
+  events?: {
+    /* Error event */
+    onError?: Array<EventAction>
+    /* Completion event */
+    onCompletion?: Array<EventAction>
+    /* Completion finished event */
+    onCompletionFinished?: Array<EventAction>
+    /* Tool use event */
+    onToolUse?: Array<EventAction>
+  }
+  outlets?: {
+    /* Evaluating outlet */
+    isEvaluating?: () => Data
+    /* Completion result outlet */
+    completionResult?: () => Data
+    /* Completion details outlet */
+    completionDetails?: () => Data
+  }
+}
+
+/* LLM inference using Anthropic-compatible API endpoints
+
+   ## Features
+   - Compatible with Anthropic API format
+   - Supports function calling (tools)
+   - Streaming responses
+   - Custom API endpoints (Default to https://api.anthropic.com/v1/messages) */
+export type GeneratorAnthropicLLM = Generator &
+  GeneratorAnthropicLLMDef & {
+    templateKey: 'GENERATOR_ANTHROPIC_LLM'
+    switches: Array<
+      SwitchDef &
+        GeneratorAnthropicLLMDef & {
+          conds?: Array<{
+            method: '==' | '!=' | '>' | '<' | '>=' | '<='
+            cond:
+              | SwitchCondInnerStateCurrentCanvas
+              | SwitchCondData
+              | {
+                  __typename: 'SwitchCondInnerStateOutlet'
+                  outlet: 'isEvaluating' | 'completionResult' | 'completionDetails'
+                  value: any
+                }
+          }>
+        }
+    >
+  }
+
 /* Add a message to the assistant */
 export type GeneratorAssistantActionAddMessage = ActionWithParams & {
   __actionName: 'GENERATOR_ASSISTANT_ADD_MESSAGE'
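Putting the new types together, a hedged usage sketch: a `GeneratorAnthropicLLM`-style property block plus a completion action. Only fields visible in the diff are used; the `Generator` and `ActionWithParams` base shapes are not shown here, so required base fields are omitted and the API key is a placeholder.

```ts
// Sketch only: base-type fields (Generator / ActionWithParams) are omitted
// and the API key is a placeholder.
const anthropicLlmProperty = {
  apiEndpoint: 'https://api.anthropic.com/v1/messages',
  apiKey: 'sk-ant-placeholder',
  model: 'claude-3-5-sonnet-20241022',
  systemMessage: 'You are a helpful assistant.',
  completionMaxTokens: 1024,
  completionTemperature: 1,
}

const runCompletion = {
  __actionName: 'GENERATOR_ANTHROPIC_LLM_COMPLETION' as const,
  params: [
    {
      input: 'messages' as const,
      value: [{ role: 'user', content: 'Summarize this project in one line.' }],
    },
    { input: 'maxTokens' as const, value: 256 },
  ],
}
```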
package/utils/event-props.ts
CHANGED
@@ -884,6 +884,23 @@ export const templateEventPropsMap = {
       'GENERATOR_OPENAI_TTS_ERROR', // type: string
     ],
   },
+  GENERATOR_ANTHROPIC_LLM: {
+    onError: [
+      'GENERATOR_ANTHROPIC_LLM_ERROR', // type: string
+    ],
+    onCompletion: [
+      'GENERATOR_ANTHROPIC_LLM_COMPLETION_RESULT', // type: string
+      'GENERATOR_ANTHROPIC_LLM_COMPLETION_DETAILS', // type: object
+    ],
+    onCompletionFinished: [
+      'GENERATOR_ANTHROPIC_LLM_COMPLETION_RESULT', // type: string
+      'GENERATOR_ANTHROPIC_LLM_TOOL_USES', // type: array
+    ],
+    onToolUse: [
+      'GENERATOR_ANTHROPIC_LLM_TOOL_USE_NAME', // type: string
+      'GENERATOR_ANTHROPIC_LLM_TOOL_USE_INPUT', // type: object
+    ],
+  },
   GENERATOR_ASSISTANT: {
     onError: [
       'GENERATOR_ASSISTANT_ERROR', // type: string
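As a hedged illustration of consuming this map, the entry added above lists the event-property keys each GENERATOR_ANTHROPIC_LLM event exposes; the import path below is assumed, not documented in this diff.

```ts
// templateEventPropsMap shape taken from the diff
// (templateKey -> event name -> array of event-property keys).
import { templateEventPropsMap } from '@fugood/bricks-project/utils/event-props' // hypothetical import path

const toolUseProps = templateEventPropsMap.GENERATOR_ANTHROPIC_LLM.onToolUse
// -> ['GENERATOR_ANTHROPIC_LLM_TOOL_USE_NAME', 'GENERATOR_ANTHROPIC_LLM_TOOL_USE_INPUT']

// e.g. check whether an event exposes a given property key before wiring a mapping:
const exposesToolName = toolUseProps.includes('GENERATOR_ANTHROPIC_LLM_TOOL_USE_NAME') // true
```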