@volley/recognition-client-sdk-node22 0.1.424

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (58)
  1. package/README.md +344 -0
  2. package/dist/browser.bundled.d.ts +1280 -0
  3. package/dist/browser.d.ts +10 -0
  4. package/dist/browser.d.ts.map +1 -0
  5. package/dist/config-builder.d.ts +134 -0
  6. package/dist/config-builder.d.ts.map +1 -0
  7. package/dist/errors.d.ts +41 -0
  8. package/dist/errors.d.ts.map +1 -0
  9. package/dist/factory.d.ts +36 -0
  10. package/dist/factory.d.ts.map +1 -0
  11. package/dist/index.bundled.d.ts +2572 -0
  12. package/dist/index.d.ts +16 -0
  13. package/dist/index.d.ts.map +1 -0
  14. package/dist/index.js +10199 -0
  15. package/dist/index.js.map +7 -0
  16. package/dist/recog-client-sdk.browser.d.ts +10 -0
  17. package/dist/recog-client-sdk.browser.d.ts.map +1 -0
  18. package/dist/recog-client-sdk.browser.js +5746 -0
  19. package/dist/recog-client-sdk.browser.js.map +7 -0
  20. package/dist/recognition-client.d.ts +128 -0
  21. package/dist/recognition-client.d.ts.map +1 -0
  22. package/dist/recognition-client.types.d.ts +271 -0
  23. package/dist/recognition-client.types.d.ts.map +1 -0
  24. package/dist/simplified-vgf-recognition-client.d.ts +178 -0
  25. package/dist/simplified-vgf-recognition-client.d.ts.map +1 -0
  26. package/dist/utils/audio-ring-buffer.d.ts +69 -0
  27. package/dist/utils/audio-ring-buffer.d.ts.map +1 -0
  28. package/dist/utils/message-handler.d.ts +45 -0
  29. package/dist/utils/message-handler.d.ts.map +1 -0
  30. package/dist/utils/url-builder.d.ts +28 -0
  31. package/dist/utils/url-builder.d.ts.map +1 -0
  32. package/dist/vgf-recognition-mapper.d.ts +66 -0
  33. package/dist/vgf-recognition-mapper.d.ts.map +1 -0
  34. package/dist/vgf-recognition-state.d.ts +91 -0
  35. package/dist/vgf-recognition-state.d.ts.map +1 -0
  36. package/package.json +74 -0
  37. package/src/browser.ts +24 -0
  38. package/src/config-builder.spec.ts +265 -0
  39. package/src/config-builder.ts +240 -0
  40. package/src/errors.ts +84 -0
  41. package/src/factory.spec.ts +215 -0
  42. package/src/factory.ts +47 -0
  43. package/src/index.ts +127 -0
  44. package/src/recognition-client.spec.ts +889 -0
  45. package/src/recognition-client.ts +844 -0
  46. package/src/recognition-client.types.ts +338 -0
  47. package/src/simplified-vgf-recognition-client.integration.spec.ts +718 -0
  48. package/src/simplified-vgf-recognition-client.spec.ts +1525 -0
  49. package/src/simplified-vgf-recognition-client.ts +524 -0
  50. package/src/utils/audio-ring-buffer.spec.ts +335 -0
  51. package/src/utils/audio-ring-buffer.ts +170 -0
  52. package/src/utils/message-handler.spec.ts +311 -0
  53. package/src/utils/message-handler.ts +131 -0
  54. package/src/utils/url-builder.spec.ts +252 -0
  55. package/src/utils/url-builder.ts +92 -0
  56. package/src/vgf-recognition-mapper.spec.ts +78 -0
  57. package/src/vgf-recognition-mapper.ts +232 -0
  58. package/src/vgf-recognition-state.ts +102 -0
@@ -0,0 +1,45 @@
1
+ /**
2
+ * Message Handler for Recognition Client
3
+ * Routes incoming WebSocket messages to appropriate callbacks
4
+ */
5
+ import { type TranscriptionResultV1, type FunctionCallResultV1, type MetadataResultV1, type ErrorResultV1, type ClientControlMessageV1 } from '@recog/shared-types';
6
+ export interface MessageHandlerCallbacks {
7
+ onTranscript: (result: TranscriptionResultV1) => void;
8
+ onFunctionCall: (result: FunctionCallResultV1) => void;
9
+ onMetadata: (metadata: MetadataResultV1) => void;
10
+ onError: (error: ErrorResultV1) => void;
11
+ onControlMessage: (msg: ClientControlMessageV1) => void;
12
+ logger?: (level: 'debug' | 'info' | 'warn' | 'error', message: string, data?: any) => void;
13
+ }
14
+ export declare class MessageHandler {
15
+ private firstTranscriptTime;
16
+ private sessionStartTime;
17
+ private callbacks;
18
+ constructor(callbacks: MessageHandlerCallbacks);
19
+ /**
20
+ * Set session start time for performance tracking
21
+ */
22
+ setSessionStartTime(time: number): void;
23
+ /**
24
+ * Handle incoming WebSocket message
25
+ */
26
+ handleMessage(msg: {
27
+ v: number;
28
+ type: string;
29
+ data: any;
30
+ }): void;
31
+ /**
32
+ * Handle transcript message and track performance metrics
33
+ * @param result - The transcription result from the server
34
+ */
35
+ private handleTranscription;
36
+ /**
37
+ * Get performance metrics
38
+ */
39
+ getMetrics(): {
40
+ sessionStartTime: number | null;
41
+ firstTranscriptTime: number | null;
42
+ timeToFirstTranscript: number | null;
43
+ };
44
+ }
45
+ //# sourceMappingURL=message-handler.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"message-handler.d.ts","sourceRoot":"","sources":["../../src/utils/message-handler.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAEH,OAAO,EAGL,KAAK,qBAAqB,EAC1B,KAAK,oBAAoB,EACzB,KAAK,gBAAgB,EACrB,KAAK,aAAa,EAClB,KAAK,sBAAsB,EAC5B,MAAM,qBAAqB,CAAC;AAE7B,MAAM,WAAW,uBAAuB;IACtC,YAAY,EAAE,CAAC,MAAM,EAAE,qBAAqB,KAAK,IAAI,CAAC;IACtD,cAAc,EAAE,CAAC,MAAM,EAAE,oBAAoB,KAAK,IAAI,CAAC;IACvD,UAAU,EAAE,CAAC,QAAQ,EAAE,gBAAgB,KAAK,IAAI,CAAC;IACjD,OAAO,EAAE,CAAC,KAAK,EAAE,aAAa,KAAK,IAAI,CAAC;IACxC,gBAAgB,EAAE,CAAC,GAAG,EAAE,sBAAsB,KAAK,IAAI,CAAC;IACxD,MAAM,CAAC,EAAE,CAAC,KAAK,EAAE,OAAO,GAAG,MAAM,GAAG,MAAM,GAAG,OAAO,EAAE,OAAO,EAAE,MAAM,EAAE,IAAI,CAAC,EAAE,GAAG,KAAK,IAAI,CAAC;CAC5F;AAED,qBAAa,cAAc;IACzB,OAAO,CAAC,mBAAmB,CAAuB;IAClD,OAAO,CAAC,gBAAgB,CAAuB;IAC/C,OAAO,CAAC,SAAS,CAA0B;gBAE/B,SAAS,EAAE,uBAAuB;IAI9C;;OAEG;IACH,mBAAmB,CAAC,IAAI,EAAE,MAAM,GAAG,IAAI;IAIvC;;OAEG;IACH,aAAa,CAAC,GAAG,EAAE;QAAE,CAAC,EAAE,MAAM,CAAC;QAAC,IAAI,EAAE,MAAM,CAAC;QAAC,IAAI,EAAE,GAAG,CAAA;KAAE,GAAG,IAAI;IAsDhE;;;OAGG;IACH,OAAO,CAAC,mBAAmB;IAgB3B;;OAEG;IACH,UAAU;;;;;CAUX"}
@@ -0,0 +1,28 @@
1
+ /**
2
+ * URL Builder for Recognition Client
3
+ * Handles WebSocket URL construction with query parameters
4
+ */
5
+ import type { GameContextV1, Stage } from '@recog/shared-types';
6
+ import type { RecognitionCallbackUrl } from '../recognition-client.types.js';
7
+ export interface UrlBuilderConfig {
8
+ url?: string;
9
+ stage?: Stage | string;
10
+ audioUtteranceId: string;
11
+ callbackUrls?: RecognitionCallbackUrl[];
12
+ userId?: string;
13
+ gameSessionId?: string;
14
+ deviceId?: string;
15
+ accountId?: string;
16
+ questionAnswerId?: string;
17
+ platform?: string;
18
+ gameContext?: GameContextV1;
19
+ /** Standalone gameId - takes precedence over gameContext.gameId if both provided */
20
+ gameId?: string;
21
+ }
22
+ /**
23
+ * Build WebSocket URL with all query parameters
24
+ * Either `url` or `stage` must be provided (or defaults to production if neither provided)
25
+ * If both are provided, `url` takes precedence over `stage`
26
+ */
27
+ export declare function buildWebSocketUrl(config: UrlBuilderConfig): string;
28
+ //# sourceMappingURL=url-builder.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"url-builder.d.ts","sourceRoot":"","sources":["../../src/utils/url-builder.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAGH,OAAO,KAAK,EAAE,aAAa,EAAE,KAAK,EAAE,MAAM,qBAAqB,CAAC;AAChE,OAAO,KAAK,EAAE,sBAAsB,EAAE,MAAM,gCAAgC,CAAC;AAE7E,MAAM,WAAW,gBAAgB;IAC/B,GAAG,CAAC,EAAE,MAAM,CAAC;IACb,KAAK,CAAC,EAAE,KAAK,GAAG,MAAM,CAAC;IACvB,gBAAgB,EAAE,MAAM,CAAC;IACzB,YAAY,CAAC,EAAE,sBAAsB,EAAE,CAAC;IACxC,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,aAAa,CAAC,EAAE,MAAM,CAAC;IACvB,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,gBAAgB,CAAC,EAAE,MAAM,CAAC;IAC1B,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,WAAW,CAAC,EAAE,aAAa,CAAC;IAC5B,oFAAoF;IACpF,MAAM,CAAC,EAAE,MAAM,CAAC;CACjB;AAED;;;;GAIG;AACH,wBAAgB,iBAAiB,CAAC,MAAM,EAAE,gBAAgB,GAAG,MAAM,CA6DlE"}
@@ -0,0 +1,66 @@
1
+ /**
2
+ * VGF Recognition Mapper
3
+ *
4
+ * Maps between the existing recognition client types and the simplified VGF state.
5
+ * This provides a clean abstraction layer for game developers.
6
+ */
7
+ import { RecognitionState } from './vgf-recognition-state.js';
8
+ import { ClientState, IRecognitionClientConfig } from './recognition-client.types.js';
9
+ import { TranscriptionResultV1, ErrorResultV1 } from '@recog/shared-types';
10
+ /**
11
+ * Maps ClientState to RecordingStatus for VGF state
12
+ */
13
+ export declare function mapClientStateToRecordingStatus(clientState: ClientState): string;
14
+ /**
15
+ * Creates a VGF state from transcription result
16
+ */
17
+ export declare function mapTranscriptionResultToState(currentState: RecognitionState, result: TranscriptionResultV1, isRecording: boolean): RecognitionState;
18
+ /**
19
+ * Maps error to state
20
+ */
21
+ export declare function mapErrorToState(currentState: RecognitionState, error: ErrorResultV1): RecognitionState;
22
+ /**
23
+ * Creates initial VGF state from client config
24
+ */
25
+ export declare function createVGFStateFromConfig(config: IRecognitionClientConfig): RecognitionState;
26
+ /**
27
+ * Updates state when recording stops
28
+ */
29
+ export declare function updateStateOnStop(currentState: RecognitionState): RecognitionState;
30
+ /**
31
+ * Resets session state with a new UUID.
32
+ *
33
+ * This creates a fresh session state while preserving non-session fields
34
+ * (like promptSlotMap, asrConfig, etc.)
35
+ *
36
+ * Resets:
37
+ * - audioUtteranceId → new UUID
38
+ * - transcriptionStatus → NOT_STARTED
39
+ * - startRecordingStatus → READY
40
+ * - recognitionActionProcessingState → NOT_STARTED
41
+ * - finalTranscript → undefined
42
+ *
43
+ * @param currentState - The current recognition state
44
+ * @returns A new state with reset session fields and a new UUID
45
+ */
46
+ export declare function resetRecognitionVGFState(currentState: RecognitionState): RecognitionState;
47
+ /**
48
+ * Updates state when client becomes ready
49
+ */
50
+ export declare function updateStateOnReady(currentState: RecognitionState): RecognitionState;
51
+ /**
52
+ * Parses function call from transcript (STEP 3 support)
53
+ * This is a placeholder - actual implementation would use NLP/LLM
54
+ */
55
+ export declare function extractFunctionCallFromTranscript(transcript: string, gameContext?: any): {
56
+ metadata?: string;
57
+ confidence?: number;
58
+ } | null;
59
+ /**
60
+ * Updates state with function call results (STEP 3)
61
+ */
62
+ export declare function updateStateWithFunctionCall(currentState: RecognitionState, functionCall: {
63
+ metadata?: string;
64
+ confidence?: number;
65
+ }): RecognitionState;
66
+ //# sourceMappingURL=vgf-recognition-mapper.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"vgf-recognition-mapper.d.ts","sourceRoot":"","sources":["../src/vgf-recognition-mapper.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAEH,OAAO,EACH,gBAAgB,EAKnB,MAAM,4BAA4B,CAAC;AAEpC,OAAO,EACH,WAAW,EACX,wBAAwB,EAC3B,MAAM,+BAA+B,CAAC;AACvC,OAAO,EACH,qBAAqB,EACrB,aAAa,EAChB,MAAM,qBAAqB,CAAC;AAE7B;;GAEG;AACH,wBAAgB,+BAA+B,CAAC,WAAW,EAAE,WAAW,GAAG,MAAM,CAmBhF;AAED;;GAEG;AACH,wBAAgB,6BAA6B,CACzC,YAAY,EAAE,gBAAgB,EAC9B,MAAM,EAAE,qBAAqB,EAC7B,WAAW,EAAE,OAAO,GACrB,gBAAgB,CAgDlB;AAED;;GAEG;AACH,wBAAgB,eAAe,CAC3B,YAAY,EAAE,gBAAgB,EAC9B,KAAK,EAAE,aAAa,GACrB,gBAAgB,CAOlB;AAED;;GAEG;AACH,wBAAgB,wBAAwB,CAAC,MAAM,EAAE,wBAAwB,GAAG,gBAAgB,CAU3F;AAED;;GAEG;AACH,wBAAgB,iBAAiB,CAAC,YAAY,EAAE,gBAAgB,GAAG,gBAAgB,CAMlF;AAED;;;;;;;;;;;;;;;GAeG;AACH,wBAAgB,wBAAwB,CAAC,YAAY,EAAE,gBAAgB,GAAG,gBAAgB,CASzF;AAED;;GAEG;AACH,wBAAgB,kBAAkB,CAAC,YAAY,EAAE,gBAAgB,GAAG,gBAAgB,CAKnF;AAED;;;GAGG;AACH,wBAAgB,iCAAiC,CAC7C,UAAU,EAAE,MAAM,EAClB,WAAW,CAAC,EAAE,GAAG,GAClB;IAAE,QAAQ,CAAC,EAAE,MAAM,CAAC;IAAC,UAAU,CAAC,EAAE,MAAM,CAAA;CAAE,GAAG,IAAI,CAiBnD;AAED;;GAEG;AACH,wBAAgB,2BAA2B,CACvC,YAAY,EAAE,gBAAgB,EAC9B,YAAY,EAAE;IAAE,QAAQ,CAAC,EAAE,MAAM,CAAC;IAAC,UAAU,CAAC,EAAE,MAAM,CAAA;CAAE,GACzD,gBAAgB,CAOlB"}
@@ -0,0 +1,91 @@
1
+ import { z } from "zod";
2
+ /**
3
+ * VGF-style state schema for game-side recognition state/results management.
4
+ *
5
+ * This schema provides a standardized way for game developers to manage
6
+ * voice recognition state and results in their applications. It supports:
7
+ *
8
+ * STEP 1: Basic transcription flow
9
+ * STEP 2: Mic auto-stop upon correct answer (using partial transcripts)
10
+ * STEP 3: Semantic/function-call outcomes for game actions
11
+ *
12
+ * Ideally this should be part of a more centralized shared type library to free
13
+ * game developers and provide helper functions (VGF? Platform SDK?).
14
+ */
15
+ export declare const RecognitionVGFStateSchema: z.ZodObject<{
16
+ audioUtteranceId: z.ZodString;
17
+ startRecordingStatus: z.ZodOptional<z.ZodString>;
18
+ transcriptionStatus: z.ZodOptional<z.ZodString>;
19
+ finalTranscript: z.ZodOptional<z.ZodString>;
20
+ finalConfidence: z.ZodOptional<z.ZodNumber>;
21
+ asrConfig: z.ZodOptional<z.ZodString>;
22
+ startRecordingTimestamp: z.ZodOptional<z.ZodString>;
23
+ finalRecordingTimestamp: z.ZodOptional<z.ZodString>;
24
+ finalTranscriptionTimestamp: z.ZodOptional<z.ZodString>;
25
+ pendingTranscript: z.ZodDefault<z.ZodOptional<z.ZodString>>;
26
+ pendingConfidence: z.ZodOptional<z.ZodNumber>;
27
+ functionCallMetadata: z.ZodOptional<z.ZodString>;
28
+ functionCallConfidence: z.ZodOptional<z.ZodNumber>;
29
+ finalFunctionCallTimestamp: z.ZodOptional<z.ZodString>;
30
+ promptSlotMap: z.ZodOptional<z.ZodRecord<z.ZodString, z.ZodArray<z.ZodString, "many">>>;
31
+ recognitionActionProcessingState: z.ZodOptional<z.ZodString>;
32
+ }, "strip", z.ZodTypeAny, {
33
+ audioUtteranceId: string;
34
+ pendingTranscript: string;
35
+ startRecordingStatus?: string | undefined;
36
+ transcriptionStatus?: string | undefined;
37
+ finalTranscript?: string | undefined;
38
+ finalConfidence?: number | undefined;
39
+ asrConfig?: string | undefined;
40
+ startRecordingTimestamp?: string | undefined;
41
+ finalRecordingTimestamp?: string | undefined;
42
+ finalTranscriptionTimestamp?: string | undefined;
43
+ pendingConfidence?: number | undefined;
44
+ functionCallMetadata?: string | undefined;
45
+ functionCallConfidence?: number | undefined;
46
+ finalFunctionCallTimestamp?: string | undefined;
47
+ promptSlotMap?: Record<string, string[]> | undefined;
48
+ recognitionActionProcessingState?: string | undefined;
49
+ }, {
50
+ audioUtteranceId: string;
51
+ startRecordingStatus?: string | undefined;
52
+ transcriptionStatus?: string | undefined;
53
+ finalTranscript?: string | undefined;
54
+ finalConfidence?: number | undefined;
55
+ asrConfig?: string | undefined;
56
+ startRecordingTimestamp?: string | undefined;
57
+ finalRecordingTimestamp?: string | undefined;
58
+ finalTranscriptionTimestamp?: string | undefined;
59
+ pendingTranscript?: string | undefined;
60
+ pendingConfidence?: number | undefined;
61
+ functionCallMetadata?: string | undefined;
62
+ functionCallConfidence?: number | undefined;
63
+ finalFunctionCallTimestamp?: string | undefined;
64
+ promptSlotMap?: Record<string, string[]> | undefined;
65
+ recognitionActionProcessingState?: string | undefined;
66
+ }>;
67
+ export type RecognitionState = z.infer<typeof RecognitionVGFStateSchema>;
68
+ export declare const RecordingStatus: {
69
+ readonly NOT_READY: "NOT_READY";
70
+ readonly READY: "READY";
71
+ readonly RECORDING: "RECORDING";
72
+ readonly FINISHED: "FINISHED";
73
+ };
74
+ export type RecordingStatusType = typeof RecordingStatus[keyof typeof RecordingStatus];
75
+ export declare const TranscriptionStatus: {
76
+ readonly NOT_STARTED: "NOT_STARTED";
77
+ readonly IN_PROGRESS: "IN_PROGRESS";
78
+ readonly FINALIZED: "FINALIZED";
79
+ readonly ABORTED: "ABORTED";
80
+ readonly ERROR: "ERROR";
81
+ };
82
+ export type TranscriptionStatusType = typeof TranscriptionStatus[keyof typeof TranscriptionStatus];
83
+ export declare const RecognitionActionProcessingState: {
84
+ readonly NOT_STARTED: "NOT_STARTED";
85
+ readonly IN_PROGRESS: "IN_PROGRESS";
86
+ readonly COMPLETED: "COMPLETED";
87
+ };
88
+ export type RecognitionActionProcessingStateType = typeof RecognitionActionProcessingState[keyof typeof RecognitionActionProcessingState];
89
+ export declare function createInitialRecognitionState(audioUtteranceId: string): RecognitionState;
90
+ export declare function isValidRecordingStatusTransition(from: string | undefined, to: string): boolean;
91
+ //# sourceMappingURL=vgf-recognition-state.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"vgf-recognition-state.d.ts","sourceRoot":"","sources":["../src/vgf-recognition-state.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,CAAC,EAAE,MAAM,KAAK,CAAA;AAEvB;;;;;;;;;;;;GAYG;AACH,eAAO,MAAM,yBAAyB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;EA6BpC,CAAA;AAEF,MAAM,MAAM,gBAAgB,GAAG,CAAC,CAAC,KAAK,CAAC,OAAO,yBAAyB,CAAC,CAAA;AAGxE,eAAO,MAAM,eAAe;;;;;CAKlB,CAAA;AAEV,MAAM,MAAM,mBAAmB,GAAG,OAAO,eAAe,CAAC,MAAM,OAAO,eAAe,CAAC,CAAA;AAEtF,eAAO,MAAM,mBAAmB;;;;;;CAMtB,CAAA;AAEV,MAAM,MAAM,uBAAuB,GAAG,OAAO,mBAAmB,CAAC,MAAM,OAAO,mBAAmB,CAAC,CAAA;AAElG,eAAO,MAAM,gCAAgC;;;;CAInC,CAAA;AAEV,MAAM,MAAM,oCAAoC,GAAG,OAAO,gCAAgC,CAAC,MAAM,OAAO,gCAAgC,CAAC,CAAA;AAGzI,wBAAgB,6BAA6B,CAAC,gBAAgB,EAAE,MAAM,GAAG,gBAAgB,CAQxF;AAGD,wBAAgB,gCAAgC,CAAC,IAAI,EAAE,MAAM,GAAG,SAAS,EAAE,EAAE,EAAE,MAAM,GAAG,OAAO,CAa9F"}
package/package.json ADDED
@@ -0,0 +1,74 @@
1
+ {
2
+ "name": "@volley/recognition-client-sdk-node22",
3
+ "version": "0.1.424",
4
+ "description": "Recognition Service TypeScript/Node.js Client SDK (TypeScript 5.4.5 compatible build)",
5
+ "type": "module",
6
+ "main": "dist/index.js",
7
+ "module": "dist/index.js",
8
+ "types": "dist/index.bundled.d.ts",
9
+ "engines": {
10
+ "node": ">=22.0.0"
11
+ },
12
+ "exports": {
13
+ ".": {
14
+ "types": "./dist/index.bundled.d.ts",
15
+ "import": "./dist/index.js",
16
+ "default": "./dist/index.js"
17
+ },
18
+ "./browser": {
19
+ "types": "./dist/browser.bundled.d.ts",
20
+ "import": "./dist/recog-client-sdk.browser.js",
21
+ "default": "./dist/recog-client-sdk.browser.js"
22
+ }
23
+ },
24
+ "files": [
25
+ "dist",
26
+ "src"
27
+ ],
28
+ "sideEffects": false,
29
+ "publishConfig": {
30
+ "access": "public",
31
+ "provenance": true
32
+ },
33
+ "dependencies": {
34
+ "uuid": "^9.0.0",
35
+ "ws": "~8.18.0",
36
+ "zod": "3.22.4"
37
+ },
38
+ "devDependencies": {
39
+ "@semantic-release/changelog": "6.0.3",
40
+ "@semantic-release/commit-analyzer": "13.0.1",
41
+ "@semantic-release/exec": "7.1.0",
42
+ "@semantic-release/git": "10.0.1",
43
+ "@semantic-release/github": "12.0.0",
44
+ "@semantic-release/npm": "13.1.1",
45
+ "@semantic-release/release-notes-generator": "14.1.0",
46
+ "@types/jest": "^29.5.0",
47
+ "@types/node": "~20.11.27",
48
+ "@types/uuid": "^9.0.2",
49
+ "@types/ws": "^8.5.5",
50
+ "esbuild": "0.23.0",
51
+ "jest": "^29.7.0",
52
+ "rollup": "^4.9.0",
53
+ "rollup-plugin-dts": "^6.1.0",
54
+ "semantic-release": "^24.0.0",
55
+ "ts-jest": "^29.1.0",
56
+ "typescript": "~5.4.5",
57
+ "@recog/shared-config": "1.0.0",
58
+ "@recog/websocket": "1.0.0",
59
+ "@recog/shared-types": "1.0.0",
60
+ "@recog/shared-utils": "1.0.0"
61
+ },
62
+ "keywords": [
63
+ "recognition",
64
+ "sdk",
65
+ "audio",
66
+ "speech"
67
+ ],
68
+ "scripts": {
69
+ "build": "node build.mjs",
70
+ "dev": "node build.mjs --watch",
71
+ "test": "jest --passWithNoTests",
72
+ "lint": "eslint src --ext .ts"
73
+ }
74
+ }
package/src/browser.ts ADDED
@@ -0,0 +1,24 @@
1
+ /**
2
+ * Browser-only entry point for Recognition SDK
3
+ * Excludes server-side dependencies
4
+ *
5
+ * Note: This file directly imports browser-compatible modules only,
6
+ * avoiding Node.js-specific code like the ws package.
7
+ */
8
+
9
+ // Import directly from recognition-client
10
+ // It uses WebSocketAudioClient which is browser-compatible
11
+ export {
12
+ RealTimeTwoWayWebSocketRecognitionClient,
13
+ type RealTimeTwoWayWebSocketRecognitionClientConfig,
14
+ type TranscriptionResult
15
+ } from './recognition-client.js';
16
+
17
+ // Re-export the types needed for browser usage
18
+ export {
19
+ AudioEncoding,
20
+ ControlSignalTypeV1 as ControlSignal,
21
+ type GameContextV1,
22
+ RecognitionContextTypeV1
23
+ } from '@recog/shared-types';
24
+
@@ -0,0 +1,265 @@
1
+ /**
2
+ * Unit tests for ConfigBuilder
3
+ */
4
+
5
+ import { ConfigBuilder } from './config-builder.js';
6
+ import { RecognitionProvider, RecognitionContextTypeV1 } from '@recog/shared-types';
7
+
8
+ describe('ConfigBuilder', () => {
9
+ it('should build empty config', () => {
10
+ const config = new ConfigBuilder().build();
11
+ expect(config).toBeDefined();
12
+ });
13
+
14
+ it('should set url', () => {
15
+ const config = new ConfigBuilder()
16
+ .url('ws://localhost:3101/ws/v1/recognize')
17
+ .build();
18
+ expect(config.url).toBe('ws://localhost:3101/ws/v1/recognize');
19
+ });
20
+
21
+ it('should set asrRequestConfig', () => {
22
+ const asrConfig = {
23
+ provider: RecognitionProvider.DEEPGRAM,
24
+ model: 'nova-2-general',
25
+ language: 'en-US',
26
+ sampleRate: 16000,
27
+ encoding: 'linear16'
28
+ };
29
+ const config = new ConfigBuilder()
30
+ .asrRequestConfig(asrConfig)
31
+ .build();
32
+ expect(config.asrRequestConfig).toEqual(asrConfig);
33
+ });
34
+
35
+ it('should set gameContext', () => {
36
+ const gameContext = {
37
+ type: RecognitionContextTypeV1.GAME_CONTEXT as const,
38
+ gameId: 'test-game',
39
+ gamePhase: 'test-phase'
40
+ };
41
+ const config = new ConfigBuilder()
42
+ .gameContext(gameContext)
43
+ .build();
44
+ expect(config.gameContext).toEqual(gameContext);
45
+ });
46
+
47
+ it('should set audioUtteranceId', () => {
48
+ const config = new ConfigBuilder()
49
+ .audioUtteranceId('test-utterance-id')
50
+ .build();
51
+ expect(config.audioUtteranceId).toBe('test-utterance-id');
52
+ });
53
+
54
+ it('should set callbackUrls', () => {
55
+ const callbackUrls = [
56
+ { url: 'https://example.com/callback', event: 'transcript' as const }
57
+ ];
58
+ const config = new ConfigBuilder()
59
+ .callbackUrls(callbackUrls)
60
+ .build();
61
+ expect(config.callbackUrls).toEqual(callbackUrls);
62
+ });
63
+
64
+ it('should set userId', () => {
65
+ const config = new ConfigBuilder()
66
+ .userId('user-123')
67
+ .build();
68
+ expect(config.userId).toBe('user-123');
69
+ });
70
+
71
+ it('should set gameSessionId', () => {
72
+ const config = new ConfigBuilder()
73
+ .gameSessionId('session-456')
74
+ .build();
75
+ expect(config.gameSessionId).toBe('session-456');
76
+ });
77
+
78
+ it('should set deviceId', () => {
79
+ const config = new ConfigBuilder()
80
+ .deviceId('device-789')
81
+ .build();
82
+ expect(config.deviceId).toBe('device-789');
83
+ });
84
+
85
+ it('should set accountId', () => {
86
+ const config = new ConfigBuilder()
87
+ .accountId('account-abc')
88
+ .build();
89
+ expect(config.accountId).toBe('account-abc');
90
+ });
91
+
92
+ it('should set questionAnswerId', () => {
93
+ const config = new ConfigBuilder()
94
+ .questionAnswerId('qa-xyz')
95
+ .build();
96
+ expect(config.questionAnswerId).toBe('qa-xyz');
97
+ });
98
+
99
+ it('should set platform', () => {
100
+ const config = new ConfigBuilder()
101
+ .platform('ios')
102
+ .build();
103
+ expect(config.platform).toBe('ios');
104
+ });
105
+
106
+ it('should set onTranscript callback', () => {
107
+ const callback = jest.fn();
108
+ const config = new ConfigBuilder()
109
+ .onTranscript(callback)
110
+ .build();
111
+ expect(config.onTranscript).toBe(callback);
112
+ });
113
+
114
+ it('should set onMetadata callback', () => {
115
+ const callback = jest.fn();
116
+ const config = new ConfigBuilder()
117
+ .onMetadata(callback)
118
+ .build();
119
+ expect(config.onMetadata).toBe(callback);
120
+ });
121
+
122
+ it('should set onError callback', () => {
123
+ const callback = jest.fn();
124
+ const config = new ConfigBuilder()
125
+ .onError(callback)
126
+ .build();
127
+ expect(config.onError).toBe(callback);
128
+ });
129
+
130
+ it('should set onConnected callback', () => {
131
+ const callback = jest.fn();
132
+ const config = new ConfigBuilder()
133
+ .onConnected(callback)
134
+ .build();
135
+ expect(config.onConnected).toBe(callback);
136
+ });
137
+
138
+ it('should set onDisconnected callback', () => {
139
+ const callback = jest.fn();
140
+ const config = new ConfigBuilder()
141
+ .onDisconnected(callback)
142
+ .build();
143
+ expect(config.onDisconnected).toBe(callback);
144
+ });
145
+
146
+ it('should set highWaterMark', () => {
147
+ const config = new ConfigBuilder()
148
+ .highWaterMark(1000)
149
+ .build();
150
+ expect(config.highWaterMark).toBe(1000);
151
+ });
152
+
153
+ it('should set lowWaterMark', () => {
154
+ const config = new ConfigBuilder()
155
+ .lowWaterMark(500)
156
+ .build();
157
+ expect(config.lowWaterMark).toBe(500);
158
+ });
159
+
160
+ it('should set maxBufferDurationSec', () => {
161
+ const config = new ConfigBuilder()
162
+ .maxBufferDurationSec(10)
163
+ .build();
164
+ expect(config.maxBufferDurationSec).toBe(10);
165
+ });
166
+
167
+ it('should set chunksPerSecond', () => {
168
+ const config = new ConfigBuilder()
169
+ .chunksPerSecond(50)
170
+ .build();
171
+ expect(config.chunksPerSecond).toBe(50);
172
+ });
173
+
174
+ it('should set logger', () => {
175
+ const logger = jest.fn();
176
+ const config = new ConfigBuilder()
177
+ .logger(logger)
178
+ .build();
179
+ expect(config.logger).toBe(logger);
180
+ });
181
+
182
+ it('should support method chaining', () => {
183
+ const config = new ConfigBuilder()
184
+ .url('ws://localhost:3101/ws/v1/recognize')
185
+ .audioUtteranceId('test-id')
186
+ .userId('user-123')
187
+ .gameSessionId('session-456')
188
+ .deviceId('device-789')
189
+ .platform('ios')
190
+ .highWaterMark(1000)
191
+ .lowWaterMark(500)
192
+ .build();
193
+
194
+ expect(config.url).toBe('ws://localhost:3101/ws/v1/recognize');
195
+ expect(config.audioUtteranceId).toBe('test-id');
196
+ expect(config.userId).toBe('user-123');
197
+ expect(config.gameSessionId).toBe('session-456');
198
+ expect(config.deviceId).toBe('device-789');
199
+ expect(config.platform).toBe('ios');
200
+ expect(config.highWaterMark).toBe(1000);
201
+ expect(config.lowWaterMark).toBe(500);
202
+ });
203
+
204
+ it('should build complete configuration', () => {
205
+ const onTranscript = jest.fn();
206
+ const onError = jest.fn();
207
+ const onConnected = jest.fn();
208
+ const onDisconnected = jest.fn();
209
+ const logger = jest.fn();
210
+
211
+ const config = new ConfigBuilder()
212
+ .url('ws://localhost:3101/ws/v1/recognize')
213
+ .asrRequestConfig({
214
+ provider: RecognitionProvider.DEEPGRAM,
215
+ model: 'nova-2-general',
216
+ language: 'en-US',
217
+ sampleRate: 16000,
218
+ encoding: 'linear16'
219
+ })
220
+ .gameContext({
221
+ type: RecognitionContextTypeV1.GAME_CONTEXT as const,
222
+ gameId: 'test-game',
223
+ gamePhase: 'test-phase'
224
+ })
225
+ .audioUtteranceId('test-utterance-id')
226
+ .userId('user-123')
227
+ .gameSessionId('session-456')
228
+ .deviceId('device-789')
229
+ .accountId('account-abc')
230
+ .questionAnswerId('qa-xyz')
231
+ .platform('ios')
232
+ .onTranscript(onTranscript)
233
+ .onError(onError)
234
+ .onConnected(onConnected)
235
+ .onDisconnected(onDisconnected)
236
+ .highWaterMark(1000)
237
+ .lowWaterMark(500)
238
+ .maxBufferDurationSec(10)
239
+ .chunksPerSecond(50)
240
+ .logger(logger)
241
+ .build();
242
+
243
+ expect(config).toMatchObject({
244
+ url: 'ws://localhost:3101/ws/v1/recognize',
245
+ audioUtteranceId: 'test-utterance-id',
246
+ userId: 'user-123',
247
+ gameSessionId: 'session-456',
248
+ deviceId: 'device-789',
249
+ accountId: 'account-abc',
250
+ questionAnswerId: 'qa-xyz',
251
+ platform: 'ios',
252
+ highWaterMark: 1000,
253
+ lowWaterMark: 500,
254
+ maxBufferDurationSec: 10,
255
+ chunksPerSecond: 50
256
+ });
257
+ expect(config.asrRequestConfig?.provider).toBe(RecognitionProvider.DEEPGRAM);
258
+ expect(config.gameContext?.gameId).toBe('test-game');
259
+ expect(config.onTranscript).toBe(onTranscript);
260
+ expect(config.onError).toBe(onError);
261
+ expect(config.onConnected).toBe(onConnected);
262
+ expect(config.onDisconnected).toBe(onDisconnected);
263
+ expect(config.logger).toBe(logger);
264
+ });
265
+ });