@dropout-ai/runtime 0.2.5 → 0.2.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +1 -1
- package/src/index.js +129 -136
package/package.json CHANGED
package/src/index.js CHANGED
@@ -3,12 +3,12 @@
  * Role: Passive observer inside the user’s app.
  *
  * Runtime never reasons, never interprets, never stores intelligence.
- *
+ * it only: Observes, Normalizes, Emits signals.
  */
 
 const crypto = require('crypto');
 
-// ---
+// --- Identity & State ---
 const GLOBAL_OBJ = typeof window !== 'undefined' ? window : global;
 
 function generateSessionId() {
@@ -19,15 +19,15 @@ function generateSessionId() {
   }
 }
 
-// One session = one continuous user attempt
-// Resets on page reload (Browser) or process restart (Node)
 if (!GLOBAL_OBJ.__dropout_session_id__) {
   GLOBAL_OBJ.__dropout_session_id__ = generateSessionId();
 }
 
 let turnIndex = 0;
+let lastPromptHash = null;
+let lastResponseHash = null;
 
-// ---
+// --- Runtime Guarantees ---
 let config = {
   maxOutputBytes: 32768,
   captureEndpoint: 'http://localhost:4000/capture',
@@ -35,7 +35,7 @@ let config = {
   privacyMode: (typeof process !== 'undefined' && process.env.DROPOUT_PRIVACY_MODE) || 'safe'
 };
 
-//
+// Remote config fetch (Non-blocking)
setTimeout(async () => {
   const fetchFn = GLOBAL_OBJ.__dropout_original_fetch__ || GLOBAL_OBJ.fetch;
   if (typeof fetchFn !== 'function') return;
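
Editor's note: privacyMode above defaults to 'safe' and is read only from DROPOUT_PRIVACY_MODE, and the hunks further down populate content_raw only when it is 'full'. A minimal opt-in sketch, not from the package, assuming that requiring the package root loads package/src/index.js:

    // Hypothetical usage sketch, not shipped code.
    process.env.DROPOUT_PRIVACY_MODE = 'full'; // must be set before the runtime is loaded
    require('@dropout-ai/runtime');            // in 'safe' mode content_raw stays null and only content_hash is emitted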
@@ -65,9 +65,9 @@ function emit(payload) {
   }, 0);
 }
 
-// ---
+// --- Content Utilities ---
 
-function getSemanticHash(text) {
+function hash(text) {
   if (!text) return null;
   try {
     return crypto.createHash('sha256').update(text.toLowerCase().trim()).digest('hex');
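
Editor's note: the renamed hash() lower-cases and trims its input before hashing, so the content_hash fields (and the retry_like / non_adaptive_response flags derived from them below) treat texts that differ only in case or surrounding whitespace as identical. A standalone sketch of that equivalence (plain Node, not package code):

    const crypto = require('crypto');
    const hash = (text) =>
      text ? crypto.createHash('sha256').update(text.toLowerCase().trim()).digest('hex') : null;

    // Same digest despite different casing/whitespace, which is what makes a
    // re-sent prompt register as retry_like: 1 in the events further down.
    console.log(hash('Explain CORS ') === hash('explain cors')); // true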
@@ -76,34 +76,7 @@ function getSemanticHash(text) {
   }
 }
 
-function getMarkers(text) {
-  if (!text) return null;
-  return {
-    chars: text.length,
-    words: text.split(/\s+/).length,
-    lines: text.split('\n').length,
-    has_code: /```/.test(text),
-    has_list: /^\s*[-*•\d+.]/m.test(text)
-  };
-}
-
-const PATTERNS = {
-  negative: ["wrong", "bad", "not helpful", "incorrect", "stupid", "error", "worst"],
-  positive: ["thanks", "good", "perfect", "correct", "great", "helpful"],
-  struggle: ["but", "why", "stop", "don't", "no", "again", "explain"]
-};
-
-function getFlags(text) {
-  if (!text) return [];
-  const t = text.toLowerCase();
-  const flags = [];
-  if (PATTERNS.negative.some(p => t.includes(p))) flags.push('neg');
-  if (PATTERNS.positive.some(p => t.includes(p))) flags.push('pos');
-  if (PATTERNS.struggle.some(p => t.includes(p))) flags.push('clash');
-  return flags;
-}
-
-// --- B. Event Capture (Provider-agnostic) ---
+// --- Provider Normalization ---
 
 function normalize(url, body) {
   let provider = 'unknown';
@@ -131,16 +104,13 @@ function normalize(url, body) {
   return { provider, model };
 }
 
-
-
-*/
+// --- The Monkey Patch ---
+
 if (typeof GLOBAL_OBJ.fetch === 'function' && !GLOBAL_OBJ.fetch.__dropout_patched__) {
   GLOBAL_OBJ.__dropout_original_fetch__ = GLOBAL_OBJ.fetch;
 
   GLOBAL_OBJ.fetch = async function (input, init) {
-    const start = Date.now();
     const url = typeof input === 'string' ? input : (input && input.url);
-
     const isAI = url && (
       url.includes('openai.com') ||
       url.includes('anthropic.com') ||
@@ -149,49 +119,97 @@ if (typeof GLOBAL_OBJ.fetch === 'function' && !GLOBAL_OBJ.fetch.__dropout_patche
       (init && init.body && (init.body.includes('"model"') || init.body.includes('"messages"')))
     );
 
-
+    if (!isAI) return GLOBAL_OBJ.__dropout_original_fetch__(input, init);
+
+    const start = Date.now();
+    const turn = turnIndex++;
+    const { provider, model } = normalize(url, init && init.body);
+
+    // --- 1. Emit Request Event ---
+    let pText = "";
+    if (init && init.body) {
+      try { pText = typeof init.body === 'string' ? init.body : JSON.stringify(init.body); } catch (e) { }
+    }
+    const pHash = hash(pText);
+    const isRetry = pHash && pHash === lastPromptHash;
+
+    const requestEvent = {
+      identity: {
+        session_id: GLOBAL_OBJ.__dropout_session_id__,
+        turn_index: turn,
+        direction: 'request',
+        turn_role: 'user'
+      },
+      timing: {
+        created_at: Date.now()
+      },
+      provider_context: {
+        provider,
+        model
+      },
+      content: {
+        content_raw: config.privacyMode === 'full' ? pText : null,
+        content_hash: pHash
+      },
+      metadata_flags: {
+        retry_like: isRetry ? 1 : 0
+      }
+    };
+
+    emit(requestEvent);
+    lastPromptHash = pHash;
+
+    // Execute actual fetch
+    let response;
+    let oText = "";
+    try {
+      response = await GLOBAL_OBJ.__dropout_original_fetch__(input, init);
+    } catch (err) {
+      // Re-throw after giving it a chance to be reported if needed (though runtime usually silent)
+      throw err;
+    }
 
-
-    const latency = Date.now() - start;
-    const turn = turnIndex++;
-    const { provider, model } = normalize(url, init && init.body);
+    const latency = Date.now() - start;
 
-
-
-
+    // --- 2. Emit Response Event ---
+    try {
+      const cloned = response.clone();
+      oText = await cloned.text();
+      if (oText && oText.length > config.maxOutputBytes) {
+        oText = oText.slice(0, config.maxOutputBytes);
       }
+    } catch (e) { }
 
-
-
-      const cloned = response.clone();
-      oText = await cloned.text();
-      if (oText && oText.length > config.maxOutputBytes) {
-        oText = oText.slice(0, config.maxOutputBytes);
-      }
-    } catch (e) { }
+    const oHash = hash(oText);
+    const isNonAdaptive = oHash && oHash === lastResponseHash;
 
-
+    const responseEvent = {
+      identity: {
         session_id: GLOBAL_OBJ.__dropout_session_id__,
-
-
-
+        turn_index: turn,
+        direction: 'response',
+        turn_role: 'assistant'
+      },
+      timing: {
+        created_at: Date.now(),
+        latency_ms: latency
+      },
+      provider_context: {
         provider,
-        model
-
-
-
-
-
-
-
-      payload.output_hash = getSemanticHash(oText);
-      payload.markers = { p: getMarkers(pText), o: getMarkers(oText) };
-      payload.flags = getFlags(pText + " " + oText);
+        model
+      },
+      content: {
+        content_raw: config.privacyMode === 'full' ? oText : null,
+        content_hash: oHash
+      },
+      metadata_flags: {
+        non_adaptive_response: isNonAdaptive ? 1 : 0,
+        turn_boundary_confirmed: 1
       }
+    };
 
-
-
+    emit(responseEvent);
+    lastResponseHash = oHash;
 
     return response;
   };
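
Editor's note: a usage sketch of the patched fetch above, not from the package; it assumes Node 18+ with a global fetch, that the package root resolves to package/src/index.js, and that emit() posts to config.captureEndpoint (which this diff does not show):

    require('@dropout-ai/runtime');

    (async () => {
      // Matches the isAI heuristic (openai.com host / body containing "model" or "messages"),
      // so a single call now emits two events: a 'request' event (retry_like) and a
      // 'response' event (latency_ms, non_adaptive_response, turn_boundary_confirmed).
      await fetch('https://api.openai.com/v1/chat/completions', {
        method: 'POST',
        headers: {
          'content-type': 'application/json',
          authorization: `Bearer ${process.env.OPENAI_API_KEY}`
        },
        body: JSON.stringify({ model: 'gpt-4o-mini', messages: [{ role: 'user', content: 'hi' }] })
      });

      // Non-matching traffic is handed straight to the original fetch, untouched.
      await fetch('https://example.com/');
    })();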
@@ -204,73 +222,46 @@ if (typeof GLOBAL_OBJ.fetch === 'function' && !GLOBAL_OBJ.fetch.__dropout_patche
 */
 async function capture(target, options = {}) {
   const start = Date.now();
-
-  // Resolve target (function, promise, or static object)
-  let result;
-  if (typeof target === 'function') {
-    result = await target();
-  } else if (target && typeof target.then === 'function') {
-    result = await target;
-  } else {
-    // case: capture({ prompt, output })
-    const { prompt, output } = target;
-    const latency = options.latency || 0;
-    const turn = turnIndex++;
-    const mode = options.privacy || config.privacyMode;
-
-    const payload = {
-      session_id: GLOBAL_OBJ.__dropout_session_id__,
-      timestamp: Date.now(),
-      latency,
-      turn_position: turn,
-      provider: options.provider || 'manual',
-      model: options.model || 'unknown',
-      mode
-    };
-
-    if (mode === 'full') {
-      payload.prompt = prompt;
-      payload.output = output;
-    } else {
-      payload.prompt_hash = getSemanticHash(prompt);
-      payload.output_hash = getSemanticHash(output);
-      payload.markers = { p: getMarkers(prompt), o: getMarkers(output) };
-      payload.flags = getFlags((prompt || "") + " " + (output || ""));
-    }
-
-    emit(payload);
-    return output;
-  }
-
-  // Wrapped execution capture
-  const latency = Date.now() - start;
   const turn = turnIndex++;
   const mode = options.privacy || config.privacyMode;
-  const prompt = options.prompt;
-  const output = typeof result === 'string' ? result : JSON.stringify(result);
-
-  const payload = {
-    session_id: GLOBAL_OBJ.__dropout_session_id__,
-    timestamp: Date.now(),
-    latency,
-    turn_position: turn,
-    provider: options.provider || 'manual',
-    model: options.model || 'unknown',
-    mode
-  };
 
-
-
-
+  let prompt, output, latency_ms = options.latency || 0;
+
+  if (typeof target === 'function' || (target && typeof target.then === 'function')) {
+    prompt = options.prompt;
+    const result = typeof target === 'function' ? await target() : await target;
+    output = typeof result === 'string' ? result : JSON.stringify(result);
+    latency_ms = Date.now() - start;
   } else {
-
-
-    payload.markers = { p: getMarkers(prompt), o: getMarkers(output) };
-    payload.flags = getFlags((prompt || "") + " " + (output || ""));
+    prompt = target.prompt;
+    output = target.output;
   }
 
-
-
+  const pHash = hash(prompt);
+  const oHash = hash(output);
+
+  // Emit Request
+  emit({
+    identity: { session_id: GLOBAL_OBJ.__dropout_session_id__, turn_index: turn, direction: 'request', turn_role: 'user' },
+    timing: { created_at: Date.now() },
+    provider_context: { provider: options.provider || 'manual', model: options.model || 'unknown' },
+    content: { content_raw: mode === 'full' ? prompt : null, content_hash: pHash },
+    metadata_flags: { retry_like: pHash === lastPromptHash ? 1 : 0 }
+  });
+
+  // Emit Response
+  emit({
+    identity: { session_id: GLOBAL_OBJ.__dropout_session_id__, turn_index: turn, direction: 'response', turn_role: 'assistant' },
+    timing: { created_at: Date.now(), latency_ms },
+    provider_context: { provider: options.provider || 'manual', model: options.model || 'unknown' },
+    content: { content_raw: mode === 'full' ? output : null, content_hash: oHash },
+    metadata_flags: { non_adaptive_response: oHash === lastResponseHash ? 1 : 0, turn_boundary_confirmed: 1 }
+  });
+
+  lastPromptHash = pHash;
+  lastResponseHash = oHash;
+
+  return output;
 }
 
 module.exports = {
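
Editor's note: a usage sketch of the rewritten capture(), covering both call styles from the code above. That capture is exported is an assumption; the exports hunk below only shows reset. Same entry-point assumption as the earlier notes:

    const { capture } = require('@dropout-ai/runtime'); // export of capture assumed, not shown in this diff

    (async () => {
      // 1. Wrapped execution: capture awaits the function (or promise) and measures latency_ms itself.
      const output = await capture(async () => 'stubbed model output', {
        prompt: 'Summarise this ticket', // placeholder values; only the option keys come from the diff
        provider: 'internal',
        model: 'my-model-v1'
      });

      // 2. Static object: prompt/output come from the target; latency comes from options (default 0).
      await capture({ prompt: 'hello', output }, { latency: 420, privacy: 'full' });
    })();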
@@ -278,5 +269,7 @@ module.exports = {
   reset: () => {
     GLOBAL_OBJ.__dropout_session_id__ = generateSessionId();
     turnIndex = 0;
+    lastPromptHash = null;
+    lastResponseHash = null;
   }
 };
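
Editor's note: with the two added lines above, reset() now also clears the dedupe state. A one-line sketch (same entry-point assumption as the notes above):

    const runtime = require('@dropout-ai/runtime');
    runtime.reset(); // fresh session_id, turn_index 0, lastPromptHash/lastResponseHash back to null as of 0.2.6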