@micrantha/react-native-amaryllis 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/Amaryllis.podspec +22 -0
- package/LICENSE +21 -0
- package/README.md +213 -0
- package/android/build.gradle +81 -0
- package/android/gradle.properties +5 -0
- package/android/src/main/AndroidManifest.xml +2 -0
- package/android/src/main/java/com/micrantha/amaryllis/Amaryllis.kt +198 -0
- package/android/src/main/java/com/micrantha/amaryllis/AmaryllisModule.kt +165 -0
- package/android/src/main/java/com/micrantha/amaryllis/AmaryllisPackage.kt +32 -0
- package/ios/Amaryllis.h +49 -0
- package/ios/Amaryllis.m +201 -0
- package/ios/AmaryllisModule.h +6 -0
- package/ios/AmaryllisModule.mm +166 -0
- package/lib/module/Amaryllis.js +56 -0
- package/lib/module/Amaryllis.js.map +1 -0
- package/lib/module/AmaryllisContext.js +56 -0
- package/lib/module/AmaryllisContext.js.map +1 -0
- package/lib/module/AmaryllisHooks.js +78 -0
- package/lib/module/AmaryllisHooks.js.map +1 -0
- package/lib/module/AmaryllisRx.js +31 -0
- package/lib/module/AmaryllisRx.js.map +1 -0
- package/lib/module/NativeAmaryllis.js +5 -0
- package/lib/module/NativeAmaryllis.js.map +1 -0
- package/lib/module/NativePipe.js +9 -0
- package/lib/module/NativePipe.js.map +1 -0
- package/lib/module/Types.js +4 -0
- package/lib/module/Types.js.map +1 -0
- package/lib/module/index.js +7 -0
- package/lib/module/index.js.map +1 -0
- package/lib/module/package.json +1 -0
- package/lib/typescript/package.json +1 -0
- package/lib/typescript/src/Amaryllis.d.ts +16 -0
- package/lib/typescript/src/Amaryllis.d.ts.map +1 -0
- package/lib/typescript/src/AmaryllisContext.d.ts +8 -0
- package/lib/typescript/src/AmaryllisContext.d.ts.map +1 -0
- package/lib/typescript/src/AmaryllisHooks.d.ts +4 -0
- package/lib/typescript/src/AmaryllisHooks.d.ts.map +1 -0
- package/lib/typescript/src/AmaryllisRx.d.ts +3 -0
- package/lib/typescript/src/AmaryllisRx.d.ts.map +1 -0
- package/lib/typescript/src/NativeAmaryllis.d.ts +12 -0
- package/lib/typescript/src/NativeAmaryllis.d.ts.map +1 -0
- package/lib/typescript/src/NativePipe.d.ts +3 -0
- package/lib/typescript/src/NativePipe.d.ts.map +1 -0
- package/lib/typescript/src/Types.d.ts +87 -0
- package/lib/typescript/src/Types.d.ts.map +1 -0
- package/lib/typescript/src/index.d.ts +6 -0
- package/lib/typescript/src/index.d.ts.map +1 -0
- package/package.json +185 -0
- package/src/Amaryllis.ts +91 -0
- package/src/AmaryllisContext.tsx +53 -0
- package/src/AmaryllisHooks.tsx +78 -0
- package/src/AmaryllisRx.ts +24 -0
- package/src/NativeAmaryllis.ts +18 -0
- package/src/NativePipe.ts +8 -0
- package/src/Types.ts +115 -0
- package/src/index.tsx +5 -0
package/src/Amaryllis.ts
ADDED
@@ -0,0 +1,91 @@
import type {
  LlmEngine,
  LlmEngineConfig,
  LlmSessionParams,
  LlmCallbacks,
  LlmRequestParams,
  LlmEventEmitter,
  LlmEventSubscription,
  LlmPipeParams,
  LlmNativeEngine,
} from './Types';

const EVENT_ON_PARTIAL_RESULT = 'onPartialResult';
const EVENT_ON_FINAL_RESULT = 'onFinalResult';
const EVENT_ON_ERROR = 'onError';

export class LlmPipe implements LlmEngine {
  subscriptions: LlmEventSubscription[] = [];
  llmEmitter: LlmEventEmitter;
  llmNative: LlmNativeEngine;

  constructor(params: LlmPipeParams) {
    this.llmNative = params.nativeModule;
    this.llmEmitter = params.eventEmitter;
  }

  async init(params: LlmEngineConfig): Promise<void> {
    await this.llmNative.init(params);
  }

  newSession(params: LlmSessionParams): Promise<void> {
    return this.llmNative.newSession(params);
  }

  async generate(params: LlmRequestParams): Promise<string> {
    return await this.llmNative.generate(params);
  }

  async generateAsync(
    params: LlmRequestParams,
    callbacks?: LlmCallbacks
  ): Promise<void> {
    if (callbacks) {
      this.setupAsyncCallbacks(callbacks);
    }

    return await this.llmNative.generateAsync(params);
  }

  close(): void {
    this.llmNative.close();
    this.cancelAsync();
  }

  cancelAsync(): void {
    this.llmNative.cancelAsync();
    this.subscriptions.forEach((sub) => sub.remove());
  }

  setupAsyncCallbacks(callbacks: LlmCallbacks): void {
    if (callbacks.onPartialResult) {
      this.subscriptions.push(
        this.llmEmitter.addListener(
          EVENT_ON_PARTIAL_RESULT,
          (result: string) => {
            callbacks.onPartialResult?.(result);
          }
        )
      );
    }

    if (callbacks.onFinalResult) {
      this.subscriptions.push(
        this.llmEmitter.addListener(EVENT_ON_FINAL_RESULT, (result: string) => {
          callbacks.onFinalResult?.(result);
          this.cancelAsync();
        })
      );
    }
    if (callbacks.onError) {
      this.subscriptions.push(
        this.llmEmitter.addListener(EVENT_ON_ERROR, (error: string) => {
          callbacks.onError?.(new Error(error));
          this.cancelAsync();
        })
      );
    }
  }
}

export default LlmPipe;
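package/src/NativePipe.ts (+8 lines in the list above) is where AmaryllisContext.tsx imports newLlmPipe from, but its body is not included in this diff. A minimal sketch of what such a factory could look like, assuming it pairs the enforced TurboModule with a react-native NativeEventEmitter; treat this as a guess at the wiring, not the published source:

import { NativeEventEmitter, NativeModules } from 'react-native';
import NativeAmaryllis from './NativeAmaryllis';
import { LlmPipe } from './Amaryllis';

export function newLlmPipe(): LlmPipe {
  // NativeEventEmitter structurally satisfies LlmEventEmitter: addListener()
  // returns an EmitterSubscription that exposes remove().
  const eventEmitter = new NativeEventEmitter(NativeModules.Amaryllis);
  return new LlmPipe({ nativeModule: NativeAmaryllis, eventEmitter });
}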

package/src/AmaryllisContext.tsx
ADDED
@@ -0,0 +1,53 @@
import { createContext, useContext, useEffect, useMemo, useState } from 'react';
import type { LLMContextValue, LLMProviderProps } from './Types';
import { newLlmPipe } from './NativePipe';

const LLMContext = createContext<LLMContextValue>({
  config: null,
  controller: null,
  error: undefined,
  isReady: false,
});

export const useLLMContext = () => useContext(LLMContext);

/**
 * Provides LLM configuration state to child components.
 * Configures LLM once on mount.
 */
export const LLMProvider = ({
  config,
  llmPipe,
  children,
}: LLMProviderProps) => {
  const [error, setError] = useState<Error | undefined>();
  const [ready, setReady] = useState(false);
  const controller = useMemo(() => {
    try {
      return llmPipe ?? newLlmPipe();
    } catch (e: any) {
      setError(e);
      return null;
    }
  }, [llmPipe]);

  useEffect(() => {
    const start = async () => {
      try {
        await controller?.init(config);
        setReady(true);
      } catch (e: any) {
        console.error('unable to start amaryllis', e);
        setError(e);
      }
    };
    start();
    return () => controller?.close();
  }, [config, controller]);

  return (
    <LLMContext.Provider value={{ config, controller, isReady: ready, error }}>
      {children}
    </LLMContext.Provider>
  );
};
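A usage sketch for the provider. ChatScreen and the model path are placeholders, and the package-root import assumes src/index.tsx (+5 lines, not shown here) re-exports LLMProvider:

import React from 'react';
import { LLMProvider } from '@micrantha/react-native-amaryllis';

// Hypothetical screen component; not part of the package.
declare function ChatScreen(): React.ReactElement;

// Hoisted so the object identity is stable: the provider's effect depends on
// config, so an inline literal would re-run init/close on every render.
const config = {
  modelPath: '/data/local/tmp/llm/model.task', // illustrative on-device path
  maxTokens: 512,
};

export default function App() {
  return (
    <LLMProvider config={config}>
      <ChatScreen />
    </LLMProvider>
  );
}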

package/src/AmaryllisHooks.tsx
ADDED
@@ -0,0 +1,78 @@
import { useCallback, useMemo, useEffect } from 'react';
import type { LlmRequestParams, InferenceProps } from './Types';
import { useLLMContext } from './AmaryllisContext';
import { createLLMObservable } from './AmaryllisRx';

export const useInferenceAsync = (props: InferenceProps = {}) => {
  const { controller } = useLLMContext();
  const { onResult, onGenerate, onError, onComplete } = props;

  const llm$ = useMemo(() => createLLMObservable(), []);

  const generate = useCallback(
    async (params: LlmRequestParams) => {
      try {
        onGenerate?.();
        await controller?.generateAsync(params, llm$.callbacks);
      } catch (err) {
        onError?.(
          err instanceof Error ? err : new Error('An unknown error occurred')
        );
      }
      return () => {
        controller?.cancelAsync();
        onComplete?.();
      };
    },
    [controller, llm$.callbacks, onComplete, onGenerate, onError]
  );

  useEffect(() => {
    const sub = llm$.observable.subscribe({
      next: ({ text, isFinal }) => {
        onResult?.(text, isFinal);
      },
      complete: () => onComplete?.(),
      error: (err) => onError?.(err),
    });

    return () => {
      sub.unsubscribe();
      controller?.cancelAsync();
    };
  }, [llm$.observable, controller, onResult, onComplete, onError]);
  return generate;
};

export const useInference = (props: InferenceProps = {}) => {
  const { controller, error: contextError } = useLLMContext();
  const { onResult, onError, onGenerate, onComplete } = props;

  useEffect(() => {
    if (contextError) {
      onError?.(contextError);
    }
  }, [contextError, onError]);

  const generate = useCallback(
    async (params: LlmRequestParams) => {
      try {
        onGenerate?.();
        const response = await controller?.generate(params);
        onResult?.(response ?? '', true);
      } catch (err) {
        onError?.(
          err instanceof Error ? err : new Error('An unknown error occurred')
        );
      }

      return () => {
        controller?.cancelAsync();
        onComplete?.();
      };
    },
    [onGenerate, controller, onResult, onError, onComplete]
  );

  return generate;
};
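A sketch of a component driving useInferenceAsync; the component, state handling, and prompt are illustrative, and it must render somewhere under LLMProvider so useLLMContext resolves a controller:

import React, { useState } from 'react';
import { Button, Text, View } from 'react-native';
import { useInferenceAsync } from './AmaryllisHooks';

export function PromptView() {
  const [output, setOutput] = useState('');

  const generate = useInferenceAsync({
    // Each event carries the latest text plus an isFinal flag; whether
    // partials are cumulative or incremental depends on the native layer,
    // so this sketch simply displays the most recent payload.
    onResult: (text) => setOutput(text),
    onError: (err) => console.warn('inference failed', err),
  });

  return (
    <View>
      <Button title="Ask" onPress={() => generate({ prompt: 'Hello there' })} />
      <Text>{output}</Text>
    </View>
  );
}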

package/src/AmaryllisRx.ts
ADDED
@@ -0,0 +1,24 @@
import type { LlmCallbacks, LLMObservableResult, LLMResult } from './Types';
import { Observable, Subscriber } from 'rxjs';

export function createLLMObservable(): LLMObservableResult {
  let subscriber: Subscriber<LLMResult>;

  const observable = new Observable<LLMResult>((sub) => {
    subscriber = sub;
  });

  const callbacks: LlmCallbacks = {
    onPartialResult: (partial) => {
      subscriber?.next({ text: partial, isFinal: false });
    },
    onFinalResult: (final) => {
      subscriber?.next({ text: final, isFinal: true });
    },
    onError: (error) => {
      subscriber?.error(error);
    },
  };

  return { observable, callbacks };
}
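Outside the hooks, the observable/callback pair can be driven directly. A sketch, assuming pipe is an already-initialized engine; the caller and prompt are illustrative:

import { createLLMObservable } from './AmaryllisRx';
import type { LlmEngine } from './Types';

async function streamOnce(pipe: LlmEngine, prompt: string) {
  const { observable, callbacks } = createLLMObservable();

  // Subscribe before generating: the cold Observable only captures its
  // Subscriber when subscribe() runs, so earlier events would be dropped.
  const sub = observable.subscribe({
    next: ({ text, isFinal }) => console.log(isFinal ? 'final' : 'partial', text),
    error: (err) => console.error('stream error', err),
  });

  await pipe.generateAsync({ prompt }, callbacks);
  return sub; // caller unsubscribes once done with the stream
}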

package/src/NativeAmaryllis.ts
ADDED
@@ -0,0 +1,18 @@
import type { TurboModule } from 'react-native';
import { TurboModuleRegistry } from 'react-native';

export interface Spec extends TurboModule {
  init(params: Object): Promise<void>;

  newSession(params: Object): Promise<void>;

  generate(params: Object): Promise<string>;

  generateAsync(params: Object): Promise<void>;

  close(): void;

  cancelAsync(): void;
}

export default TurboModuleRegistry.getEnforcing<Spec>('Amaryllis');
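The spec can also be exercised directly for one-shot generation, bypassing LlmPipe. A minimal sketch; the model path is a placeholder, and getEnforcing throws at import time if the native module is not linked:

import NativeAmaryllis from './NativeAmaryllis';

async function rawGenerate(prompt: string): Promise<string> {
  // Params are untyped Object at this layer; the shapes match
  // LlmEngineConfig and LlmSessionParams from Types.ts below.
  await NativeAmaryllis.init({ modelPath: '/path/to/model.task' });
  await NativeAmaryllis.newSession({ temperature: 0.8, topK: 40 });
  const text = await NativeAmaryllis.generate({ prompt });
  NativeAmaryllis.close();
  return text;
}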
package/src/Types.ts
ADDED
@@ -0,0 +1,115 @@
import type { Spec } from './NativeAmaryllis';
import type { Observable } from 'rxjs';

export type LlmNativeEngine = Spec;

export type LlmCallbacks = {
  // Async streaming callbacks
  onPartialResult?: (result: string) => void;
  onFinalResult?: (result: string) => void;
  onError?: (err: Error) => void;
};

// Core parameter object for configuration and request options
export type LlmRequestParams = {
  // Required
  prompt: string;
  // Multimodal support
  images?: string[];
};

export type LlmSessionParams = {
  // Optional generation settings
  topK?: number; // default: 40
  topP?: number; // default: 0.95
  temperature?: number; // default: 0.8
  randomSeed?: number; // default: 0
  loraPath?: string; // LoRA customization (GPU only)
  enableVisionModality?: boolean;
};

// Initialization/configuration for the engine
export type LlmEngineConfig = {
  modelPath: string; // Required: .task model path on device
  maxTopK?: number; // default: 64 (for session initialization)
  maxNumImages?: number; // default: 1
  maxTokens?: number; // default: 512
  visionEncoderPath?: string; // Optional: vision encoder model path for multimodal
  visionAdapterPath?: string; // Optional: vision adapter model path for multimodal
};

// Unified LLM interface
export type LlmEngine = {
  /**
   * Initialize the engine (creates LlmInference and LlmInferenceSession internally).
   */
  init(config: LlmEngineConfig): Promise<void>;

  /**
   * Start a new session.
   */
  newSession(params: LlmSessionParams): Promise<void>;

  /**
   * Generate a response synchronously (blocking).
   */
  generate(params: LlmRequestParams): Promise<string>;

  /**
   * Generate a response asynchronously (streaming).
   */
  generateAsync(
    params: LlmRequestParams,
    callbacks?: LlmCallbacks
  ): Promise<void>;

  /**
   * Clean up resources.
   */
  close(): void;

  cancelAsync(): void;
};

export interface LlmEventSubscription {
  remove: () => void;
}

export interface LlmEventEmitter {
  addListener(event: string, cb: (result: any) => void): LlmEventSubscription;
}

export interface LlmPipeParams {
  nativeModule: LlmNativeEngine;
  eventEmitter: LlmEventEmitter;
}

export interface LLMContextValue {
  config: LlmEngineConfig | null;
  controller: LlmEngine | null;
  error: Error | undefined;
  isReady: boolean;
}

export interface LLMProviderProps {
  config: LlmEngineConfig;
  llmPipe?: LlmEngine;
  children: React.ReactNode;
}

export type InferenceProps = {
  onGenerate?: () => void;
  onResult?: (result: string, isFinal: boolean) => void;
  onError?: (error: Error) => void;
  onComplete?: () => void;
};

export interface LLMResult {
  text: string;
  isFinal: boolean;
}

export interface LLMObservableResult {
  callbacks: LlmCallbacks;
  observable: Observable<LLMResult>;
}
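For reference, a config and session built from the fields above; the paths are illustrative, and omitted fields fall back to the defaults noted in the comments:

import type { LlmEngineConfig, LlmSessionParams } from './Types';

const config: LlmEngineConfig = {
  modelPath: '/data/local/tmp/llm/gemma.task', // placeholder .task path
  maxTokens: 1024, // raised from the 512 default for longer responses
  maxNumImages: 2,
  visionEncoderPath: '/data/local/tmp/llm/encoder.tflite', // placeholder
  visionAdapterPath: '/data/local/tmp/llm/adapter.tflite', // placeholder
};

const session: LlmSessionParams = {
  temperature: 0.2, // below the 0.8 default for more deterministic output
  topK: 20,
  enableVisionModality: true, // enable image inputs for this session
};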