@mulingai-npm/redis 3.38.2 → 3.38.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
|
@@ -25,6 +25,11 @@ export type SttHistoryEntry = {
|
|
|
25
25
|
source: 'stt' | 'llm';
|
|
26
26
|
provider: string;
|
|
27
27
|
};
|
|
28
|
+
/**
 * Supplemental hint attached to a per-language translation result
 * (carried on the `hints?: TranslationHint[]` field of a translation entry).
 * NOTE(review): exact field semantics are not evident from this diff — TODO confirm.
 */
export type TranslationHint = {
    /** Hint category/kind — presumably discriminates hint handling; verify against producers. */
    type: string;
    /** Reference the hint points at — assumed to identify the hinted span/source; confirm. */
    ref: string;
    /** Hint payload text. */
    text: string;
};
|
|
28
33
|
export type MulingstreamChunkData = {
|
|
29
34
|
chunkId: string;
|
|
30
35
|
roomId: string;
|
|
@@ -34,6 +39,7 @@ export type MulingstreamChunkData = {
|
|
|
34
39
|
targetLanguages: string[];
|
|
35
40
|
shortCodeTargetLanguages: string[];
|
|
36
41
|
finalTranscription: string;
|
|
42
|
+
sanitizedTranscription?: string;
|
|
37
43
|
createdAt: number;
|
|
38
44
|
creditUsageSessionId?: number | null;
|
|
39
45
|
streamingChunk: StreamingChunkData;
|
|
@@ -41,6 +47,7 @@ export type MulingstreamChunkData = {
|
|
|
41
47
|
[language: string]: {
|
|
42
48
|
translation: string;
|
|
43
49
|
status: StepStatus;
|
|
50
|
+
hints?: TranslationHint[];
|
|
44
51
|
};
|
|
45
52
|
};
|
|
46
53
|
tts: {
|
|
@@ -0,0 +1,50 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* SmartTranslate Context Manager
|
|
3
|
+
*
|
|
4
|
+
* Manages previous chunk context in Redis for best-effort context injection.
|
|
5
|
+
* Each session stores up to 3 previous chunks (sanitized preferred, raw STT fallback).
|
|
6
|
+
*
|
|
7
|
+
* Redis key: smarttranslate:context:{sessionId}
|
|
8
|
+
* Type: List (LPUSH new entries, LTRIM to keep last 3)
|
|
9
|
+
* TTL: 12 hours (same as chunk expiration)
|
|
10
|
+
*/
|
|
11
|
+
import { RedisClient } from '../redis-client';
|
|
12
|
+
/**
 * One previous-chunk context entry stored in a session's Redis context list.
 * Entries are serialized to JSON by SmartTranslateContextManager.
 */
export interface ContextChunk {
    /** Chunk sequence number within the session (used to match raw/sanitized versions). */
    seq: number;
    /** Context text: LLM-sanitized when `san` is true, raw STT text otherwise. */
    text: string;
    /** True when `text` is the LLM-sanitized version rather than the raw STT fallback. */
    san: boolean;
    /** Entry creation time in milliseconds since epoch (Date.now()). */
    ts: number;
}
|
|
18
|
+
/**
 * Manages previous-chunk context in Redis for best-effort context injection.
 * Each session keeps up to 3 previous chunks in a Redis list
 * (key: smarttranslate:context:{sessionId}, 12-hour TTL).
 */
export declare class SmartTranslateContextManager {
    private redisClient;
    constructor(redisClient: RedisClient);
    /** Builds the per-session Redis key: smarttranslate:context:{sessionId}. */
    private contextKey;
    /**
     * Get the last N context chunks for a session (best-effort).
     * Returns whatever is available RIGHT NOW — doesn't wait for pending LLM results.
     * Chunks come back in chronological order; failures yield an empty array.
     */
    getContext(sessionId: string, maxChunks?: number): Promise<ContextChunk[]>;
    /**
     * Push a context entry for a chunk.
     * If the chunk was LLM-sanitized, push the sanitized text (san: true).
     * If bypassed/Azure, push the raw STT text (san: false).
     *
     * Also handles "upgrade" — if a raw entry exists for this seq and the LLM result
     * comes back later, we replace it with the sanitized version.
     */
    pushContext(sessionId: string, entry: ContextChunk): Promise<void>;
    /**
     * Push raw STT text as fallback context immediately when a chunk arrives.
     * This ensures context is available even before LLM processing finishes.
     */
    pushRawContext(sessionId: string, seq: number, text: string): Promise<void>;
    /**
     * Upgrade a context entry with LLM-sanitized text.
     * Called when the LLM returns a result for a chunk that already has raw context.
     */
    pushSanitizedContext(sessionId: string, seq: number, sanitizedText: string): Promise<void>;
    /**
     * Clean up context for a session (called when streaming ends).
     */
    cleanupSession(sessionId: string): Promise<void>;
}
|
|
@@ -0,0 +1,128 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
/**
|
|
3
|
+
* SmartTranslate Context Manager
|
|
4
|
+
*
|
|
5
|
+
* Manages previous chunk context in Redis for best-effort context injection.
|
|
6
|
+
* Each session stores up to 3 previous chunks (sanitized preferred, raw STT fallback).
|
|
7
|
+
*
|
|
8
|
+
* Redis key: smarttranslate:context:{sessionId}
|
|
9
|
+
* Type: List (LPUSH new entries, LTRIM to keep last 3)
|
|
10
|
+
* TTL: 12 hours (same as chunk expiration)
|
|
11
|
+
*/
|
|
12
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
13
|
+
exports.SmartTranslateContextManager = void 0;
|
|
14
|
+
const CONTEXT_TTL = 12 * 60 * 60; // 12 hours
|
|
15
|
+
const MAX_CONTEXT_CHUNKS = 3;
|
|
16
|
+
class SmartTranslateContextManager {
|
|
17
|
+
constructor(redisClient) {
|
|
18
|
+
this.redisClient = redisClient;
|
|
19
|
+
}
|
|
20
|
+
contextKey(sessionId) {
|
|
21
|
+
return `smarttranslate:context:${sessionId}`;
|
|
22
|
+
}
|
|
23
|
+
/**
|
|
24
|
+
* Get the last N context chunks for a session (best-effort).
|
|
25
|
+
* Returns whatever is available RIGHT NOW — doesn't wait for pending LLM results.
|
|
26
|
+
*/
|
|
27
|
+
async getContext(sessionId, maxChunks = MAX_CONTEXT_CHUNKS) {
|
|
28
|
+
try {
|
|
29
|
+
const key = this.contextKey(sessionId);
|
|
30
|
+
const raw = await this.redisClient.lrange(key, 0, maxChunks - 1);
|
|
31
|
+
if (!raw || raw.length === 0)
|
|
32
|
+
return [];
|
|
33
|
+
return raw
|
|
34
|
+
.map((entry) => {
|
|
35
|
+
try {
|
|
36
|
+
return JSON.parse(entry);
|
|
37
|
+
}
|
|
38
|
+
catch {
|
|
39
|
+
return null;
|
|
40
|
+
}
|
|
41
|
+
})
|
|
42
|
+
.filter((c) => c !== null)
|
|
43
|
+
.reverse(); // LPUSH stores newest first, we want chronological order
|
|
44
|
+
}
|
|
45
|
+
catch (err) {
|
|
46
|
+
console.warn(`[SmartTranslateContextManager] getContext FAILED for ${sessionId}: ${err}`);
|
|
47
|
+
return [];
|
|
48
|
+
}
|
|
49
|
+
}
|
|
50
|
+
/**
|
|
51
|
+
* Push a context entry for a chunk.
|
|
52
|
+
* If the chunk was LLM-sanitized, push the sanitized text (san: true).
|
|
53
|
+
* If bypassed/Azure, push the raw STT text (san: false).
|
|
54
|
+
*
|
|
55
|
+
* Also handles "upgrade" — if a raw entry exists for this seq and LLM result
|
|
56
|
+
* comes back later, we replace it with the sanitized version.
|
|
57
|
+
*/
|
|
58
|
+
async pushContext(sessionId, entry) {
|
|
59
|
+
try {
|
|
60
|
+
const key = this.contextKey(sessionId);
|
|
61
|
+
// Check if an entry for this seq already exists (raw fallback that we can upgrade)
|
|
62
|
+
const existing = await this.getContext(sessionId, MAX_CONTEXT_CHUNKS + 2);
|
|
63
|
+
const existingIdx = existing.findIndex((c) => c.seq === entry.seq);
|
|
64
|
+
if (existingIdx >= 0 && entry.san && !existing[existingIdx].san) {
|
|
65
|
+
// Upgrade: replace raw entry with sanitized version
|
|
66
|
+
// We need to rebuild the list (Redis list doesn't support random update easily)
|
|
67
|
+
existing[existingIdx] = entry;
|
|
68
|
+
const pipe = this.redisClient.pipeline();
|
|
69
|
+
pipe.del(key);
|
|
70
|
+
// Push in reverse order since LPUSH adds to head
|
|
71
|
+
for (let i = existing.length - 1; i >= 0; i--) {
|
|
72
|
+
pipe.lpush(key, JSON.stringify(existing[i]));
|
|
73
|
+
}
|
|
74
|
+
pipe.ltrim(key, 0, MAX_CONTEXT_CHUNKS - 1);
|
|
75
|
+
pipe.expire(key, CONTEXT_TTL);
|
|
76
|
+
await pipe.exec();
|
|
77
|
+
return;
|
|
78
|
+
}
|
|
79
|
+
// Normal push: add new entry
|
|
80
|
+
if (existingIdx < 0) {
|
|
81
|
+
const pipe = this.redisClient.pipeline();
|
|
82
|
+
pipe.lpush(key, JSON.stringify(entry));
|
|
83
|
+
pipe.ltrim(key, 0, MAX_CONTEXT_CHUNKS - 1);
|
|
84
|
+
pipe.expire(key, CONTEXT_TTL);
|
|
85
|
+
await pipe.exec();
|
|
86
|
+
}
|
|
87
|
+
}
|
|
88
|
+
catch (err) {
|
|
89
|
+
console.warn(`[SmartTranslateContextManager] pushContext FAILED for ${sessionId}: ${err}`);
|
|
90
|
+
}
|
|
91
|
+
}
|
|
92
|
+
/**
|
|
93
|
+
* Push raw STT text as fallback context immediately when a chunk arrives.
|
|
94
|
+
* This ensures context is available even before LLM processing finishes.
|
|
95
|
+
*/
|
|
96
|
+
async pushRawContext(sessionId, seq, text) {
|
|
97
|
+
await this.pushContext(sessionId, {
|
|
98
|
+
seq,
|
|
99
|
+
text,
|
|
100
|
+
san: false,
|
|
101
|
+
ts: Date.now()
|
|
102
|
+
});
|
|
103
|
+
}
|
|
104
|
+
/**
|
|
105
|
+
* Upgrade a context entry with LLM-sanitized text.
|
|
106
|
+
* Called when LLM returns a result for a chunk that already has raw context.
|
|
107
|
+
*/
|
|
108
|
+
async pushSanitizedContext(sessionId, seq, sanitizedText) {
|
|
109
|
+
await this.pushContext(sessionId, {
|
|
110
|
+
seq,
|
|
111
|
+
text: sanitizedText,
|
|
112
|
+
san: true,
|
|
113
|
+
ts: Date.now()
|
|
114
|
+
});
|
|
115
|
+
}
|
|
116
|
+
/**
|
|
117
|
+
* Clean up context for a session (called when streaming ends).
|
|
118
|
+
*/
|
|
119
|
+
async cleanupSession(sessionId) {
|
|
120
|
+
try {
|
|
121
|
+
await this.redisClient.del(this.contextKey(sessionId));
|
|
122
|
+
}
|
|
123
|
+
catch {
|
|
124
|
+
// Cleanup failure is non-critical
|
|
125
|
+
}
|
|
126
|
+
}
|
|
127
|
+
}
|
|
128
|
+
exports.SmartTranslateContextManager = SmartTranslateContextManager;
|
package/dist/redis-client.d.ts
CHANGED
|
@@ -38,6 +38,9 @@ export declare class RedisClient {
|
|
|
38
38
|
incr(key: string): Promise<number>;
|
|
39
39
|
scan(cursor: number, pattern: string, count?: number): Promise<[string, string[]]>;
|
|
40
40
|
keys(pattern: string): Promise<string[]>;
|
|
41
|
+
lpush(key: string, value: string): Promise<number>;
|
|
42
|
+
lrange(key: string, start: number, stop: number): Promise<string[]>;
|
|
43
|
+
ltrim(key: string, start: number, stop: number): Promise<string>;
|
|
41
44
|
pipeline(): ReturnType<IORedis['pipeline']>;
|
|
42
45
|
unlink(...keys: string[]): Promise<number>;
|
|
43
46
|
jsonSet<T>(key: string, path: string, value: T): Promise<string>;
|
package/dist/redis-client.js
CHANGED
|
@@ -109,6 +109,16 @@ class RedisClient {
|
|
|
109
109
|
async keys(pattern) {
|
|
110
110
|
return this.client.keys(pattern);
|
|
111
111
|
}
|
|
112
|
+
// List operations (used by SmartTranslate context manager)
/**
 * LPUSH wrapper — prepends `value` to the list at `key`.
 * Resolves to the list length after the push (declared Promise<number> in redis-client.d.ts).
 */
async lpush(key, value) {
    return this.client.lpush(key, value);
}
|
|
116
|
+
/**
 * LRANGE wrapper — returns elements of the list at `key` from index `start`
 * to `stop`, inclusive (0-based; negative indices count from the tail,
 * per Redis LRANGE semantics).
 */
async lrange(key, start, stop) {
    return this.client.lrange(key, start, stop);
}
|
|
119
|
+
/**
 * LTRIM wrapper — trims the list at `key` so only the elements between
 * `start` and `stop` (inclusive) remain. Resolves to the Redis status reply
 * (declared Promise<string> in redis-client.d.ts).
 */
async ltrim(key, start, stop) {
    return this.client.ltrim(key, start, stop);
}
|
|
112
122
|
pipeline() {
|
|
113
123
|
return this.client.pipeline();
|
|
114
124
|
}
|