@mulingai-npm/redis 3.38.1 → 3.38.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -20,6 +20,16 @@ export type StreamingChunkData = {
20
20
  model?: string;
21
21
  words?: TranscriptionWord[];
22
22
  };
23
/**
 * One speech-to-text processing record for a chunk.
 * Chunks keep a rolling window of these in `sttHistory` (capped at 3,
 * newest last — see `updateLlmTranscription`).
 */
export type SttHistoryEntry = {
    // Chunk number this entry refers to.
    chunkNumber: number;
    // Which stage produced the transcription: raw STT or LLM-sanitized.
    source: 'stt' | 'llm';
    // Name of the provider that produced the text.
    provider: string;
};
28
/**
 * A hint attached to a per-language translation entry
 * (stored in `MulingstreamChunkData.translation[language].hints`).
 */
export type TranslationHint = {
    // Hint category (free-form string) — semantics not defined here; see producer.
    type: string;
    // Reference the hint points at — presumably an identifier; confirm against producer.
    ref: string;
    // Human-readable hint text.
    text: string;
};
23
33
  export type MulingstreamChunkData = {
24
34
  chunkId: string;
25
35
  roomId: string;
@@ -29,6 +39,7 @@ export type MulingstreamChunkData = {
29
39
  targetLanguages: string[];
30
40
  shortCodeTargetLanguages: string[];
31
41
  finalTranscription: string;
42
+ sanitizedTranscription?: string;
32
43
  createdAt: number;
33
44
  creditUsageSessionId?: number | null;
34
45
  streamingChunk: StreamingChunkData;
@@ -36,6 +47,7 @@ export type MulingstreamChunkData = {
36
47
  [language: string]: {
37
48
  translation: string;
38
49
  status: StepStatus;
50
+ hints?: TranslationHint[];
39
51
  };
40
52
  };
41
53
  tts: {
@@ -48,6 +60,11 @@ export type MulingstreamChunkData = {
48
60
  };
49
61
  isComplete: boolean;
50
62
  isSaved: boolean;
63
+ llmTranscription?: string;
64
+ llmWords?: TranscriptionWord[];
65
+ transcriptionSource: 'stt' | 'llm';
66
+ routeUsed?: 'LLM' | 'AZURE' | 'AZURE_FALLBACK';
67
+ sttHistory?: SttHistoryEntry[];
51
68
  };
52
69
  export declare class MulingstreamChunkManager {
53
70
  private redisClient;
@@ -91,6 +108,21 @@ export declare class MulingstreamChunkManager {
91
108
  getMulingstreamChunkById(roomId: string, n: number): Promise<MulingstreamChunkData>;
92
109
  private withChunk;
93
110
  updateFinalTranscription(roomId: string, n: number, transcription: string): Promise<MulingstreamChunkData | null>;
111
/**
 * Update chunk with LLM-sanitized transcription from SmartTranslate.
 * If the route used was 'LLM' AND the sanitized text differs from the current
 * finalTranscription, sets transcriptionSource to 'llm' and updates
 * finalTranscription to the sanitized version (wasChanged becomes true).
 * Optionally appends `sttHistoryEntry` to the chunk's rolling STT history (max 3).
 * Returns the chunk (caller can compare original vs sanitized to decide on TRANSCRIPTION_CORRECTED).
 */
updateLlmTranscription(roomId: string, n: number, opts: {
    llmTranscription: string;
    llmWords?: TranscriptionWord[];
    routeUsed: 'LLM' | 'AZURE' | 'AZURE_FALLBACK';
    sttHistoryEntry?: SttHistoryEntry;
}): Promise<{
    chunk: MulingstreamChunkData | null;
    wasChanged: boolean;
}>;
94
126
  discardLanguage(roomId: string, n: number, lang: string): Promise<MulingstreamChunkData | null>;
95
127
  discardLanguages(roomId: string, n: number, opt: {
96
128
  translation?: string[];
@@ -39,7 +39,13 @@ class MulingstreamChunkManager {
39
39
  translation: this.deserialize(h.translation),
40
40
  tts: this.deserialize(h.tts),
41
41
  isComplete: h.isComplete === 'true',
42
- isSaved: h.isSaved === 'true'
42
+ isSaved: h.isSaved === 'true',
43
+ // SmartTranslate fields
44
+ llmTranscription: h.llmTranscription || undefined,
45
+ llmWords: h.llmWords ? this.deserialize(h.llmWords) : undefined,
46
+ transcriptionSource: h.transcriptionSource || 'stt',
47
+ routeUsed: h.routeUsed || undefined,
48
+ sttHistory: h.sttHistory ? this.deserialize(h.sttHistory) : undefined
43
49
  };
44
50
  }
45
51
  getTimeout() {
@@ -133,7 +139,9 @@ class MulingstreamChunkManager {
133
139
  translation,
134
140
  tts,
135
141
  isComplete: false,
136
- isSaved: false
142
+ isSaved: false,
143
+ // SmartTranslate defaults
144
+ transcriptionSource: 'stt'
137
145
  };
138
146
  const hash = {
139
147
  chunkId,
@@ -150,7 +158,8 @@ class MulingstreamChunkManager {
150
158
  translation: this.serialize(translation),
151
159
  tts: this.serialize(tts),
152
160
  isComplete: 'false',
153
- isSaved: 'false'
161
+ isSaved: 'false',
162
+ transcriptionSource: 'stt'
154
163
  };
155
164
  const pipe = this.redisClient.pipeline();
156
165
  pipe.hset(this.chunkHashKey(chunkId), hash);
@@ -194,14 +203,25 @@ class MulingstreamChunkManager {
194
203
  const chunk = this.hashToChunk(raw);
195
204
  await fn(chunk);
196
205
  const p = this.redisClient.pipeline();
197
- p.hset(key, {
206
+ const updateHash = {
198
207
  finalTranscription: chunk.finalTranscription,
199
208
  translation: this.serialize(chunk.translation),
200
209
  tts: this.serialize(chunk.tts),
201
210
  streamingChunk: this.serialize(chunk.streamingChunk),
202
211
  isComplete: String(chunk.isComplete),
203
- isSaved: String(chunk.isSaved)
204
- });
212
+ isSaved: String(chunk.isSaved),
213
+ transcriptionSource: chunk.transcriptionSource || 'stt'
214
+ };
215
+ // SmartTranslate fields (only set if populated)
216
+ if (chunk.llmTranscription !== undefined)
217
+ updateHash.llmTranscription = chunk.llmTranscription;
218
+ if (chunk.llmWords !== undefined)
219
+ updateHash.llmWords = this.serialize(chunk.llmWords);
220
+ if (chunk.routeUsed !== undefined)
221
+ updateHash.routeUsed = chunk.routeUsed;
222
+ if (chunk.sttHistory !== undefined)
223
+ updateHash.sttHistory = this.serialize(chunk.sttHistory);
224
+ p.hset(key, updateHash);
205
225
  p.expire(key, EXPIRATION);
206
226
  await p.exec();
207
227
  return chunk;
@@ -211,6 +231,36 @@ class MulingstreamChunkManager {
211
231
  c.finalTranscription = transcription;
212
232
  });
213
233
  }
234
+ /**
235
+ * Update chunk with LLM-sanitized transcription from SmartTranslate.
236
+ * If the sanitized text differs from the original, sets transcriptionSource to 'llm'
237
+ * and updates finalTranscription to the sanitized version.
238
+ * Returns the chunk (caller can compare original vs sanitized to decide on TRANSCRIPTION_CORRECTED).
239
+ */
240
+ async updateLlmTranscription(roomId, n, opts) {
241
+ let wasChanged = false;
242
+ const chunk = await this.withChunk(roomId, n, (c) => {
243
+ const originalText = c.finalTranscription;
244
+ c.llmTranscription = opts.llmTranscription;
245
+ c.llmWords = opts.llmWords;
246
+ c.routeUsed = opts.routeUsed;
247
+ // If LLM produced a different transcription, update the active text
248
+ if (opts.routeUsed === 'LLM' && opts.llmTranscription !== originalText) {
249
+ c.finalTranscription = opts.llmTranscription;
250
+ c.transcriptionSource = 'llm';
251
+ wasChanged = true;
252
+ }
253
+ // Append to STT history (keep last 3)
254
+ if (opts.sttHistoryEntry) {
255
+ if (!c.sttHistory)
256
+ c.sttHistory = [];
257
+ c.sttHistory.push(opts.sttHistoryEntry);
258
+ if (c.sttHistory.length > 3)
259
+ c.sttHistory = c.sttHistory.slice(-3);
260
+ }
261
+ });
262
+ return { chunk, wasChanged };
263
+ }
214
264
  async discardLanguage(roomId, n, lang) {
215
265
  return this.withChunk(roomId, n, (c) => {
216
266
  if (c.translation[lang])
@@ -0,0 +1,50 @@
1
/**
 * SmartTranslate Context Manager
 *
 * Manages previous chunk context in Redis for best-effort context injection.
 * Each session stores up to 3 previous chunks (sanitized preferred, raw STT fallback).
 *
 * Redis key: smarttranslate:context:{sessionId}
 * Type: List (LPUSH new entries, LTRIM to keep last 3)
 * TTL: 12 hours (same as chunk expiration)
 */
import { RedisClient } from '../redis-client';
/** One previous-chunk context entry stored in the per-session Redis list. */
export interface ContextChunk {
    // Chunk sequence number within the session.
    seq: number;
    // Transcription text: LLM-sanitized when `san` is true, raw STT otherwise.
    text: string;
    // True when `text` is the LLM-sanitized version.
    san: boolean;
    // Timestamp the entry was recorded (epoch ms, Date.now()).
    ts: number;
}
export declare class SmartTranslateContextManager {
    private redisClient;
    constructor(redisClient: RedisClient);
    private contextKey;
    /**
     * Get the last N context chunks for a session (best-effort).
     * Returns whatever is available RIGHT NOW — doesn't wait for pending LLM results.
     * Result is in chronological order (oldest first); unparseable entries are dropped.
     */
    getContext(sessionId: string, maxChunks?: number): Promise<ContextChunk[]>;
    /**
     * Push a context entry for a chunk.
     * If the chunk was LLM-sanitized, push the sanitized text (san: true).
     * If bypassed/Azure, push the raw STT text (san: false).
     *
     * Also handles "upgrade" — if a raw entry exists for this seq and LLM result
     * comes back later, we replace it with the sanitized version.
     * A duplicate seq that cannot be upgraded is silently ignored.
     */
    pushContext(sessionId: string, entry: ContextChunk): Promise<void>;
    /**
     * Push raw STT text as fallback context immediately when a chunk arrives.
     * This ensures context is available even before LLM processing finishes.
     */
    pushRawContext(sessionId: string, seq: number, text: string): Promise<void>;
    /**
     * Upgrade a context entry with LLM-sanitized text.
     * Called when LLM returns a result for a chunk that already has raw context.
     */
    pushSanitizedContext(sessionId: string, seq: number, sanitizedText: string): Promise<void>;
    /**
     * Clean up context for a session (called when streaming ends).
     * Best-effort: failures are swallowed.
     */
    cleanupSession(sessionId: string): Promise<void>;
}
@@ -0,0 +1,128 @@
1
+ "use strict";
2
+ /**
3
+ * SmartTranslate Context Manager
4
+ *
5
+ * Manages previous chunk context in Redis for best-effort context injection.
6
+ * Each session stores up to 3 previous chunks (sanitized preferred, raw STT fallback).
7
+ *
8
+ * Redis key: smarttranslate:context:{sessionId}
9
+ * Type: List (LPUSH new entries, LTRIM to keep last 3)
10
+ * TTL: 12 hours (same as chunk expiration)
11
+ */
12
+ Object.defineProperty(exports, "__esModule", { value: true });
13
+ exports.SmartTranslateContextManager = void 0;
14
+ const CONTEXT_TTL = 12 * 60 * 60; // 12 hours
15
+ const MAX_CONTEXT_CHUNKS = 3;
16
+ class SmartTranslateContextManager {
17
+ constructor(redisClient) {
18
+ this.redisClient = redisClient;
19
+ }
20
+ contextKey(sessionId) {
21
+ return `smarttranslate:context:${sessionId}`;
22
+ }
23
+ /**
24
+ * Get the last N context chunks for a session (best-effort).
25
+ * Returns whatever is available RIGHT NOW — doesn't wait for pending LLM results.
26
+ */
27
+ async getContext(sessionId, maxChunks = MAX_CONTEXT_CHUNKS) {
28
+ try {
29
+ const key = this.contextKey(sessionId);
30
+ const raw = await this.redisClient.lrange(key, 0, maxChunks - 1);
31
+ if (!raw || raw.length === 0)
32
+ return [];
33
+ return raw
34
+ .map((entry) => {
35
+ try {
36
+ return JSON.parse(entry);
37
+ }
38
+ catch {
39
+ return null;
40
+ }
41
+ })
42
+ .filter((c) => c !== null)
43
+ .reverse(); // LPUSH stores newest first, we want chronological order
44
+ }
45
+ catch (err) {
46
+ console.warn(`[SmartTranslateContextManager] getContext FAILED for ${sessionId}: ${err}`);
47
+ return [];
48
+ }
49
+ }
50
+ /**
51
+ * Push a context entry for a chunk.
52
+ * If the chunk was LLM-sanitized, push the sanitized text (san: true).
53
+ * If bypassed/Azure, push the raw STT text (san: false).
54
+ *
55
+ * Also handles "upgrade" — if a raw entry exists for this seq and LLM result
56
+ * comes back later, we replace it with the sanitized version.
57
+ */
58
+ async pushContext(sessionId, entry) {
59
+ try {
60
+ const key = this.contextKey(sessionId);
61
+ // Check if an entry for this seq already exists (raw fallback that we can upgrade)
62
+ const existing = await this.getContext(sessionId, MAX_CONTEXT_CHUNKS + 2);
63
+ const existingIdx = existing.findIndex((c) => c.seq === entry.seq);
64
+ if (existingIdx >= 0 && entry.san && !existing[existingIdx].san) {
65
+ // Upgrade: replace raw entry with sanitized version
66
+ // We need to rebuild the list (Redis list doesn't support random update easily)
67
+ existing[existingIdx] = entry;
68
+ const pipe = this.redisClient.pipeline();
69
+ pipe.del(key);
70
+ // Push in reverse order since LPUSH adds to head
71
+ for (let i = existing.length - 1; i >= 0; i--) {
72
+ pipe.lpush(key, JSON.stringify(existing[i]));
73
+ }
74
+ pipe.ltrim(key, 0, MAX_CONTEXT_CHUNKS - 1);
75
+ pipe.expire(key, CONTEXT_TTL);
76
+ await pipe.exec();
77
+ return;
78
+ }
79
+ // Normal push: add new entry
80
+ if (existingIdx < 0) {
81
+ const pipe = this.redisClient.pipeline();
82
+ pipe.lpush(key, JSON.stringify(entry));
83
+ pipe.ltrim(key, 0, MAX_CONTEXT_CHUNKS - 1);
84
+ pipe.expire(key, CONTEXT_TTL);
85
+ await pipe.exec();
86
+ }
87
+ }
88
+ catch (err) {
89
+ console.warn(`[SmartTranslateContextManager] pushContext FAILED for ${sessionId}: ${err}`);
90
+ }
91
+ }
92
+ /**
93
+ * Push raw STT text as fallback context immediately when a chunk arrives.
94
+ * This ensures context is available even before LLM processing finishes.
95
+ */
96
+ async pushRawContext(sessionId, seq, text) {
97
+ await this.pushContext(sessionId, {
98
+ seq,
99
+ text,
100
+ san: false,
101
+ ts: Date.now()
102
+ });
103
+ }
104
+ /**
105
+ * Upgrade a context entry with LLM-sanitized text.
106
+ * Called when LLM returns a result for a chunk that already has raw context.
107
+ */
108
+ async pushSanitizedContext(sessionId, seq, sanitizedText) {
109
+ await this.pushContext(sessionId, {
110
+ seq,
111
+ text: sanitizedText,
112
+ san: true,
113
+ ts: Date.now()
114
+ });
115
+ }
116
+ /**
117
+ * Clean up context for a session (called when streaming ends).
118
+ */
119
+ async cleanupSession(sessionId) {
120
+ try {
121
+ await this.redisClient.del(this.contextKey(sessionId));
122
+ }
123
+ catch {
124
+ // Cleanup failure is non-critical
125
+ }
126
+ }
127
+ }
128
+ exports.SmartTranslateContextManager = SmartTranslateContextManager;
@@ -38,6 +38,9 @@ export declare class RedisClient {
38
38
  incr(key: string): Promise<number>;
39
39
  scan(cursor: number, pattern: string, count?: number): Promise<[string, string[]]>;
40
40
  keys(pattern: string): Promise<string[]>;
41
+ lpush(key: string, value: string): Promise<number>;
42
+ lrange(key: string, start: number, stop: number): Promise<string[]>;
43
+ ltrim(key: string, start: number, stop: number): Promise<string>;
41
44
  pipeline(): ReturnType<IORedis['pipeline']>;
42
45
  unlink(...keys: string[]): Promise<number>;
43
46
  jsonSet<T>(key: string, path: string, value: T): Promise<string>;
@@ -109,6 +109,16 @@ class RedisClient {
109
109
  async keys(pattern) {
110
110
  return this.client.keys(pattern);
111
111
  }
112
// List operations (used by SmartTranslate context manager)
/** LPUSH — prepend `value` to the list at `key`; resolves to the new list length. */
async lpush(key, value) {
    return this.client.lpush(key, value);
}
/** LRANGE — fetch elements `start`..`stop` (inclusive, 0-based; negatives count from the tail). */
async lrange(key, start, stop) {
    return this.client.lrange(key, start, stop);
}
/** LTRIM — trim the list at `key` so only elements `start`..`stop` (inclusive) remain. */
async ltrim(key, start, stop) {
    return this.client.ltrim(key, start, stop);
}
112
122
  pipeline() {
113
123
  return this.client.pipeline();
114
124
  }
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@mulingai-npm/redis",
3
- "version": "3.38.1",
3
+ "version": "3.38.3",
4
4
  "main": "dist/index.js",
5
5
  "types": "dist/index.d.ts",
6
6
  "repository": {